1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::{SettingsUi, WorktreeId};
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
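///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest) of creating a local
/// buffer inside a gpui context and taking a cheap snapshot for reading on a
/// background thread. The surrounding `App` setup is assumed to exist.
///
/// ```ignore
/// // Somewhere with access to `cx: &mut App`:
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
///
/// // Snapshots are cheap to create and can be moved to background tasks.
/// let snapshot = buffer.read(cx).snapshot();
/// ```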
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
111 pending_autoindent: Option<Task<()>>,
112 sync_parse_timeout: Duration,
113 syntax_map: Mutex<SyntaxMap>,
114 reparse: Option<Task<()>>,
115 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
116 non_text_state_update_count: usize,
117 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
118 remote_selections: TreeMap<ReplicaId, SelectionSet>,
119 diagnostics_timestamp: clock::Lamport,
120 completion_triggers: BTreeSet<String>,
121 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
122 completion_triggers_timestamp: clock::Lamport,
123 deferred_ops: OperationQueue<Operation>,
124 capability: Capability,
125 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
128 has_unsaved_edits: Cell<(clock::Global, bool)>,
129 change_bits: Vec<rc::Weak<Cell<bool>>>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
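///
/// Illustrative sketch (not compiled as a doctest); the values are hypothetical:
///
/// ```ignore
/// let four_spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// let one_tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// assert_ne!(four_spaces, one_tab);
/// ```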
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(
178 Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
179)]
180#[serde(rename_all = "snake_case")]
181pub enum CursorShape {
182 /// A vertical bar
183 #[default]
184 Bar,
185 /// A block that surrounds the following character
186 Block,
187 /// An underline that runs along the following character
188 Underline,
189 /// A box drawn around the following character
190 Hollow,
191}
192
193#[derive(Clone, Debug)]
194struct SelectionSet {
195 line_mode: bool,
196 cursor_shape: CursorShape,
197 selections: Arc<[Selection<Anchor>]>,
198 lamport_timestamp: clock::Lamport,
199}
200
201/// A diagnostic associated with a certain range of a buffer.
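///
/// # Example
///
/// An illustrative sketch (not compiled as a doctest) of how a primary
/// diagnostic and an associated diagnostic share the same `group_id`. All
/// field values here are hypothetical.
///
/// ```ignore
/// let base = |message: &str, is_primary: bool| Diagnostic {
///     source: Some("example-server".to_string()),
///     code: None,
///     code_description: None,
///     severity: DiagnosticSeverity::ERROR,
///     message: message.to_string(),
///     markdown: None,
///     group_id: 1,
///     is_primary,
///     is_disk_based: false,
///     is_unnecessary: false,
///     source_kind: DiagnosticSourceKind::Pushed,
///     data: None,
///     underline: true,
/// };
///
/// // The top-level diagnostic reported by the language server.
/// let primary = base("cannot find value `foo`", true);
/// // A related diagnostic, grouped with the primary one via the shared `group_id`.
/// let related = base("a local variable with a similar name exists: `foo2`", false);
/// assert_eq!(primary.group_id, related.group_id);
/// ```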
202#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
203pub struct Diagnostic {
204 /// The name of the service that produced this diagnostic.
205 pub source: Option<String>,
206 /// A machine-readable code that identifies this diagnostic.
207 pub code: Option<NumberOrString>,
208 pub code_description: Option<lsp::Uri>,
209 /// Whether this diagnostic is a hint, warning, or error.
210 pub severity: DiagnosticSeverity,
211 /// The human-readable message associated with this diagnostic.
212 pub message: String,
    /// The human-readable message, in Markdown format, if available.
214 pub markdown: Option<String>,
215 /// An id that identifies the group to which this diagnostic belongs.
216 ///
217 /// When a language server produces a diagnostic with
218 /// one or more associated diagnostics, those diagnostics are all
219 /// assigned a single group ID.
220 pub group_id: usize,
221 /// Whether this diagnostic is the primary diagnostic for its group.
222 ///
223 /// In a given group, the primary diagnostic is the top-level diagnostic
224 /// returned by the language server. The non-primary diagnostics are the
225 /// associated diagnostics.
226 pub is_primary: bool,
227 /// Whether this diagnostic is considered to originate from an analysis of
228 /// files on disk, as opposed to any unsaved buffer contents. This is a
229 /// property of a given diagnostic source, and is configured for a given
230 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
231 /// for the language server.
232 pub is_disk_based: bool,
233 /// Whether this diagnostic marks unnecessary code.
234 pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly separated by the kind of source that produced them.
236 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
238 pub data: Option<Value>,
239 /// Whether to underline the corresponding text range in the editor.
240 pub underline: bool,
241}
242
243#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
244pub enum DiagnosticSourceKind {
245 Pulled,
246 Pushed,
247 Other,
248}
249
250/// An operation used to synchronize this buffer with its other replicas.
251#[derive(Clone, Debug, PartialEq)]
252pub enum Operation {
253 /// A text operation.
254 Buffer(text::Operation),
255
256 /// An update to the buffer's diagnostics.
257 UpdateDiagnostics {
258 /// The id of the language server that produced the new diagnostics.
259 server_id: LanguageServerId,
260 /// The diagnostics.
261 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
262 /// The buffer's lamport timestamp.
263 lamport_timestamp: clock::Lamport,
264 },
265
266 /// An update to the most recent selections in this buffer.
267 UpdateSelections {
268 /// The selections.
269 selections: Arc<[Selection<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 /// Whether the selections are in 'line mode'.
273 line_mode: bool,
274 /// The [`CursorShape`] associated with these selections.
275 cursor_shape: CursorShape,
276 },
277
278 /// An update to the characters that should trigger autocompletion
279 /// for this buffer.
280 UpdateCompletionTriggers {
281 /// The characters that trigger autocompletion.
282 triggers: Vec<String>,
283 /// The buffer's lamport timestamp.
284 lamport_timestamp: clock::Lamport,
285 /// The language server ID.
286 server_id: LanguageServerId,
287 },
288
289 /// An update to the line ending type of this buffer.
290 UpdateLineEnding {
291 /// The line ending type.
292 line_ending: LineEnding,
293 /// The buffer's lamport timestamp.
294 lamport_timestamp: clock::Lamport,
295 },
296}
297
298/// An event that occurs in a buffer.
299#[derive(Clone, Debug, PartialEq)]
300pub enum BufferEvent {
301 /// The buffer was changed in a way that must be
302 /// propagated to its other replicas.
303 Operation {
304 operation: Operation,
305 is_local: bool,
306 },
307 /// The buffer was edited.
308 Edited,
309 /// The buffer's `dirty` bit changed.
310 DirtyChanged,
311 /// The buffer was saved.
312 Saved,
313 /// The buffer's file was changed on disk.
314 FileHandleChanged,
315 /// The buffer was reloaded.
316 Reloaded,
    /// The buffer needs to be reloaded.
318 ReloadNeeded,
319 /// The buffer's language was changed.
320 LanguageChanged,
321 /// The buffer's syntax trees were updated.
322 Reparsed,
323 /// The buffer's diagnostics were updated.
324 DiagnosticsUpdated,
325 /// The buffer gained or lost editing capabilities.
326 CapabilityChanged,
327}
328
329/// The file associated with a buffer.
330pub trait File: Send + Sync + Any {
331 /// Returns the [`LocalFile`] associated with this file, if the
332 /// file is local.
333 fn as_local(&self) -> Option<&dyn LocalFile>;
334
335 /// Returns whether this file is local.
336 fn is_local(&self) -> bool {
337 self.as_local().is_some()
338 }
339
340 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
341 /// only available in some states, such as modification time.
342 fn disk_state(&self) -> DiskState;
343
344 /// Returns the path of this file relative to the worktree's root directory.
345 fn path(&self) -> &Arc<Path>;
346
347 /// Returns the path of this file relative to the worktree's parent directory (this means it
348 /// includes the name of the worktree's root folder).
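    ///
    /// For example (hypothetical paths): if [`File::path`] returns `src/lib.rs`
    /// and the worktree's root folder is named `my-crate`, this returns
    /// `my-crate/src/lib.rs`.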
349 fn full_path(&self, cx: &App) -> PathBuf;
350
351 /// Returns the last component of this handle's absolute path. If this handle refers to the root
352 /// of its worktree, then this method will return the name of the worktree itself.
353 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
354
355 /// Returns the id of the worktree to which this file belongs.
356 ///
357 /// This is needed for looking up project-specific settings.
358 fn worktree_id(&self, cx: &App) -> WorktreeId;
359
360 /// Converts this file into a protobuf message.
361 fn to_proto(&self, cx: &App) -> rpc::proto::File;
362
    /// Returns whether Zed considers this to be a private file.
364 fn is_private(&self) -> bool;
365}
366
367/// The file's storage status - whether it's stored (`Present`), and if so when it was last
368/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
369/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
370/// indicator for new files.
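///
/// Illustrative sketch (not compiled as a doctest) of how the states map onto
/// [`DiskState::exists`] and [`DiskState::mtime`]:
///
/// ```ignore
/// assert!(!DiskState::New.exists());
/// assert!(!DiskState::Deleted.exists());
/// assert_eq!(DiskState::New.mtime(), None);
/// // `DiskState::Present { mtime }` is the only state that carries a modification time.
/// ```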
371#[derive(Copy, Clone, Debug, PartialEq)]
372pub enum DiskState {
373 /// File created in Zed that has not been saved.
374 New,
375 /// File present on the filesystem.
376 Present { mtime: MTime },
377 /// Deleted file that was previously present.
378 Deleted,
379}
380
381impl DiskState {
382 /// Returns the file's last known modification time on disk.
383 pub fn mtime(self) -> Option<MTime> {
384 match self {
385 DiskState::New => None,
386 DiskState::Present { mtime } => Some(mtime),
387 DiskState::Deleted => None,
388 }
389 }
390
391 pub fn exists(&self) -> bool {
392 match self {
393 DiskState::New => false,
394 DiskState::Present { .. } => true,
395 DiskState::Deleted => false,
396 }
397 }
398}
399
400/// The file associated with a buffer, in the case where the file is on the local disk.
401pub trait LocalFile: File {
    /// Returns the absolute path of this file.
403 fn abs_path(&self, cx: &App) -> PathBuf;
404
405 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
406 fn load(&self, cx: &App) -> Task<Result<String>>;
407
408 /// Loads the file's contents from disk.
409 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
410}
411
412/// The auto-indent behavior associated with an editing operation.
413/// For some editing operations, each affected line of text has its
414/// indentation recomputed. For other operations, the entire block
415/// of edited text is adjusted uniformly.
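///
/// Illustrative sketch (not compiled as a doctest): pasting a block whose first
/// line was originally at column 4. If auto-indent places that first line at
/// column 8, every following line of the insertion is shifted by the same
/// delta of 4 columns. All values here are hypothetical.
///
/// ```ignore
/// let _mode = AutoindentMode::Block {
///     // One entry per insertion; `Some(4)` records the copied text's
///     // original first-line indent column.
///     original_indent_columns: vec![Some(4)],
/// };
///
/// let original_column = 4_i64; // `a` in the variant docs below
/// let auto_indented_column = 8_i64; // `b` in the variant docs below
/// assert_eq!(auto_indented_column - original_column, 4); // shift applied to the other lines
/// ```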
416#[derive(Clone, Debug)]
417pub enum AutoindentMode {
418 /// Indent each line of inserted text.
419 EachLine,
420 /// Apply the same indentation adjustment to all of the lines
421 /// in a given insertion.
422 Block {
423 /// The original indentation column of the first line of each
424 /// insertion, if it has been copied.
425 ///
426 /// Knowing this makes it possible to preserve the relative indentation
427 /// of every line in the insertion from when it was copied.
428 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion will be shifted by the same delta, `b - a`.
432 original_indent_columns: Vec<Option<u32>>,
433 },
434}
435
436#[derive(Clone)]
437struct AutoindentRequest {
438 before_edit: BufferSnapshot,
439 entries: Vec<AutoindentRequestEntry>,
440 is_block_mode: bool,
441 ignore_empty_lines: bool,
442}
443
444#[derive(Debug, Clone)]
445struct AutoindentRequestEntry {
446 /// A range of the buffer whose indentation should be adjusted.
447 range: Range<Anchor>,
448 /// Whether or not these lines should be considered brand new, for the
449 /// purpose of auto-indent. When text is not new, its indentation will
450 /// only be adjusted if the suggested indentation level has *changed*
451 /// since the edit was made.
452 first_line_is_new: bool,
453 indent_size: IndentSize,
454 original_indent_column: Option<u32>,
455}
456
457#[derive(Debug)]
458struct IndentSuggestion {
459 basis_row: u32,
460 delta: Ordering,
461 within_error: bool,
462}
463
464struct BufferChunkHighlights<'a> {
465 captures: SyntaxMapCaptures<'a>,
466 next_capture: Option<SyntaxMapCapture<'a>>,
467 stack: Vec<(usize, HighlightId)>,
468 highlight_maps: Vec<HighlightMap>,
469}
470
471/// An iterator that yields chunks of a buffer's text, along with their
472/// syntax highlights and diagnostic status.
473pub struct BufferChunks<'a> {
474 buffer_snapshot: Option<&'a BufferSnapshot>,
475 range: Range<usize>,
476 chunks: text::Chunks<'a>,
477 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
478 error_depth: usize,
479 warning_depth: usize,
480 information_depth: usize,
481 hint_depth: usize,
482 unnecessary_depth: usize,
483 underline: bool,
484 highlights: Option<BufferChunkHighlights<'a>>,
485}
486
487/// A chunk of a buffer's text, along with its syntax highlight and
488/// diagnostic status.
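///
/// Illustrative sketch (not compiled as a doctest), assuming bit `i` of the
/// `tabs` bitset corresponds to position `i` within `text`:
///
/// ```ignore
/// fn position_is_tab(chunk: &Chunk<'_>, i: usize) -> bool {
///     chunk.tabs & (1 << i) != 0
/// }
/// ```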
489#[derive(Clone, Debug, Default)]
490pub struct Chunk<'a> {
491 /// The text of the chunk.
492 pub text: &'a str,
493 /// The syntax highlighting style of the chunk.
494 pub syntax_highlight_id: Option<HighlightId>,
495 /// The highlight style that has been applied to this chunk in
496 /// the editor.
497 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
499 pub diagnostic_severity: Option<DiagnosticSeverity>,
500 /// Whether this chunk of text is marked as unnecessary.
501 pub is_unnecessary: bool,
502 /// Whether this chunk of text was originally a tab character.
503 pub is_tab: bool,
504 /// A bitset of which characters are tabs in this string.
505 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
507 pub chars: u128,
    /// Whether this chunk of text comes from an inlay.
509 pub is_inlay: bool,
510 /// Whether to underline the corresponding text range in the editor.
511 pub underline: bool,
512}
513
514/// A set of edits to a given version of a buffer, computed asynchronously.
515#[derive(Debug)]
516pub struct Diff {
517 pub base_version: clock::Global,
518 pub line_ending: LineEnding,
519 pub edits: Vec<(Range<usize>, Arc<str>)>,
520}
521
522#[derive(Debug, Clone, Copy)]
523pub(crate) struct DiagnosticEndpoint {
524 offset: usize,
525 is_start: bool,
526 underline: bool,
527 severity: DiagnosticSeverity,
528 is_unnecessary: bool,
529}
530
531/// A class of characters, used for characterizing a run of text.
532#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
533pub enum CharKind {
534 /// Whitespace.
535 Whitespace,
536 /// Punctuation.
537 Punctuation,
538 /// Word.
539 Word,
540}
541
/// A runnable is a set of data about a region that could be resolved into a task.
543pub struct Runnable {
544 pub tags: SmallVec<[RunnableTag; 1]>,
545 pub language: Arc<Language>,
546 pub buffer: BufferId,
547}
548
549#[derive(Default, Clone, Debug)]
550pub struct HighlightedText {
551 pub text: SharedString,
552 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
553}
554
555#[derive(Default, Debug)]
556struct HighlightedTextBuilder {
557 pub text: String,
558 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
559}
560
561impl HighlightedText {
562 pub fn from_buffer_range<T: ToOffset>(
563 range: Range<T>,
564 snapshot: &text::BufferSnapshot,
565 syntax_snapshot: &SyntaxSnapshot,
566 override_style: Option<HighlightStyle>,
567 syntax_theme: &SyntaxTheme,
568 ) -> Self {
569 let mut highlighted_text = HighlightedTextBuilder::default();
570 highlighted_text.add_text_from_buffer_range(
571 range,
572 snapshot,
573 syntax_snapshot,
574 override_style,
575 syntax_theme,
576 );
577 highlighted_text.build()
578 }
579
580 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
581 gpui::StyledText::new(self.text.clone())
582 .with_default_highlights(default_style, self.highlights.iter().cloned())
583 }
584
    /// Returns the first line, with leading whitespace trimmed (but never past
    /// the start of the first highlight), and a boolean indicating whether more
    /// lines follow.
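    ///
    /// Illustrative sketch (not compiled as a doctest), with hypothetical input:
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```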
587 pub fn first_line_preview(self) -> (Self, bool) {
588 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
589 let first_line = &self.text[..newline_ix];
590
591 // Trim leading whitespace, unless an edit starts prior to it.
592 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
593 if let Some((first_highlight_range, _)) = self.highlights.first() {
594 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
595 }
596
597 let preview_text = &first_line[preview_start_ix..];
598 let preview_highlights = self
599 .highlights
600 .into_iter()
601 .take_while(|(range, _)| range.start < newline_ix)
602 .filter_map(|(mut range, highlight)| {
603 range.start = range.start.saturating_sub(preview_start_ix);
604 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
605 if range.is_empty() {
606 None
607 } else {
608 Some((range, highlight))
609 }
610 });
611
612 let preview = Self {
613 text: SharedString::new(preview_text),
614 highlights: preview_highlights.collect(),
615 };
616
617 (preview, self.text.len() > newline_ix)
618 }
619}
620
621impl HighlightedTextBuilder {
622 pub fn build(self) -> HighlightedText {
623 HighlightedText {
624 text: self.text.into(),
625 highlights: self.highlights,
626 }
627 }
628
629 pub fn add_text_from_buffer_range<T: ToOffset>(
630 &mut self,
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) {
637 let range = range.to_offset(snapshot);
638 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
639 let start = self.text.len();
640 self.text.push_str(chunk.text);
641 let end = self.text.len();
642
643 if let Some(highlight_style) = chunk
644 .syntax_highlight_id
645 .and_then(|id| id.style(syntax_theme))
646 {
647 let highlight_style = override_style.map_or(highlight_style, |override_style| {
648 highlight_style.highlight(override_style)
649 });
650 self.highlights.push((start..end, highlight_style));
651 } else if let Some(override_style) = override_style {
652 self.highlights.push((start..end, override_style));
653 }
654 }
655 }
656
657 fn highlighted_chunks<'a>(
658 range: Range<usize>,
659 snapshot: &'a text::BufferSnapshot,
660 syntax_snapshot: &'a SyntaxSnapshot,
661 ) -> BufferChunks<'a> {
662 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
663 grammar.highlights_query.as_ref()
664 });
665
666 let highlight_maps = captures
667 .grammars()
668 .iter()
669 .map(|grammar| grammar.highlight_map())
670 .collect();
671
672 BufferChunks::new(
673 snapshot.as_rope(),
674 range,
675 Some((captures, highlight_maps)),
676 false,
677 None,
678 )
679 }
680}
681
682#[derive(Clone)]
683pub struct EditPreview {
684 old_snapshot: text::BufferSnapshot,
685 applied_edits_snapshot: text::BufferSnapshot,
686 syntax_snapshot: SyntaxSnapshot,
687}
688
689impl EditPreview {
690 pub fn highlight_edits(
691 &self,
692 current_snapshot: &BufferSnapshot,
693 edits: &[(Range<Anchor>, String)],
694 include_deletions: bool,
695 cx: &App,
696 ) -> HighlightedText {
697 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
698 return HighlightedText::default();
699 };
700
701 let mut highlighted_text = HighlightedTextBuilder::default();
702
703 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
704
705 let insertion_highlight_style = HighlightStyle {
706 background_color: Some(cx.theme().status().created_background),
707 ..Default::default()
708 };
709 let deletion_highlight_style = HighlightStyle {
710 background_color: Some(cx.theme().status().deleted_background),
711 ..Default::default()
712 };
713 let syntax_theme = cx.theme().syntax();
714
715 for (range, edit_text) in edits {
716 let edit_new_end_in_preview_snapshot = range
717 .end
718 .bias_right(&self.old_snapshot)
719 .to_offset(&self.applied_edits_snapshot);
720 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
721
722 let unchanged_range_in_preview_snapshot =
723 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
724 if !unchanged_range_in_preview_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 unchanged_range_in_preview_snapshot,
727 &self.applied_edits_snapshot,
728 &self.syntax_snapshot,
729 None,
730 syntax_theme,
731 );
732 }
733
734 let range_in_current_snapshot = range.to_offset(current_snapshot);
735 if include_deletions && !range_in_current_snapshot.is_empty() {
736 highlighted_text.add_text_from_buffer_range(
737 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
740 Some(deletion_highlight_style),
741 syntax_theme,
742 );
743 }
744
745 if !edit_text.is_empty() {
746 highlighted_text.add_text_from_buffer_range(
747 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
748 &self.applied_edits_snapshot,
749 &self.syntax_snapshot,
750 Some(insertion_highlight_style),
751 syntax_theme,
752 );
753 }
754
755 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
756 }
757
758 highlighted_text.add_text_from_buffer_range(
759 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
760 &self.applied_edits_snapshot,
761 &self.syntax_snapshot,
762 None,
763 syntax_theme,
764 );
765
766 highlighted_text.build()
767 }
768
769 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
770 let (first, _) = edits.first()?;
771 let (last, _) = edits.last()?;
772
773 let start = first
774 .start
775 .bias_left(&self.old_snapshot)
776 .to_point(&self.applied_edits_snapshot);
777 let end = last
778 .end
779 .bias_right(&self.old_snapshot)
780 .to_point(&self.applied_edits_snapshot);
781
782 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
783 let range = Point::new(start.row, 0)
784 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
785
786 Some(range.to_offset(&self.applied_edits_snapshot))
787 }
788}
789
790#[derive(Clone, Debug, PartialEq, Eq)]
791pub struct BracketMatch {
792 pub open_range: Range<usize>,
793 pub close_range: Range<usize>,
794 pub newline_only: bool,
795}
796
797impl Buffer {
798 /// Create a new buffer with the given base text.
799 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
800 Self::build(
801 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
802 None,
803 Capability::ReadWrite,
804 )
805 }
806
807 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
808 pub fn local_normalized(
809 base_text_normalized: Rope,
810 line_ending: LineEnding,
811 cx: &Context<Self>,
812 ) -> Self {
813 Self::build(
814 TextBuffer::new_normalized(
815 0,
816 cx.entity_id().as_non_zero_u64().into(),
817 line_ending,
818 base_text_normalized,
819 ),
820 None,
821 Capability::ReadWrite,
822 )
823 }
824
825 /// Create a new buffer that is a replica of a remote buffer.
826 pub fn remote(
827 remote_id: BufferId,
828 replica_id: ReplicaId,
829 capability: Capability,
830 base_text: impl Into<String>,
831 ) -> Self {
832 Self::build(
833 TextBuffer::new(replica_id, remote_id, base_text.into()),
834 None,
835 capability,
836 )
837 }
838
839 /// Create a new buffer that is a replica of a remote buffer, populating its
840 /// state from the given protobuf message.
841 pub fn from_proto(
842 replica_id: ReplicaId,
843 capability: Capability,
844 message: proto::BufferState,
845 file: Option<Arc<dyn File>>,
846 ) -> Result<Self> {
847 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
848 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
849 let mut this = Self::build(buffer, file, capability);
850 this.text.set_line_ending(proto::deserialize_line_ending(
851 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
852 ));
853 this.saved_version = proto::deserialize_version(&message.saved_version);
854 this.saved_mtime = message.saved_mtime.map(|time| time.into());
855 Ok(this)
856 }
857
858 /// Serialize the buffer's state to a protobuf message.
859 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
860 proto::BufferState {
861 id: self.remote_id().into(),
862 file: self.file.as_ref().map(|f| f.to_proto(cx)),
863 base_text: self.base_text().to_string(),
864 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
865 saved_version: proto::serialize_version(&self.saved_version),
866 saved_mtime: self.saved_mtime.map(|time| time.into()),
867 }
868 }
869
870 /// Serialize as protobufs all of the changes to the buffer since the given version.
871 pub fn serialize_ops(
872 &self,
873 since: Option<clock::Global>,
874 cx: &App,
875 ) -> Task<Vec<proto::Operation>> {
876 let mut operations = Vec::new();
877 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
878
879 operations.extend(self.remote_selections.iter().map(|(_, set)| {
880 proto::serialize_operation(&Operation::UpdateSelections {
881 selections: set.selections.clone(),
882 lamport_timestamp: set.lamport_timestamp,
883 line_mode: set.line_mode,
884 cursor_shape: set.cursor_shape,
885 })
886 }));
887
888 for (server_id, diagnostics) in &self.diagnostics {
889 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
890 lamport_timestamp: self.diagnostics_timestamp,
891 server_id: *server_id,
892 diagnostics: diagnostics.iter().cloned().collect(),
893 }));
894 }
895
896 for (server_id, completions) in &self.completion_triggers_per_language_server {
897 operations.push(proto::serialize_operation(
898 &Operation::UpdateCompletionTriggers {
899 triggers: completions.iter().cloned().collect(),
900 lamport_timestamp: self.completion_triggers_timestamp,
901 server_id: *server_id,
902 },
903 ));
904 }
905
906 let text_operations = self.text.operations().clone();
907 cx.background_spawn(async move {
908 let since = since.unwrap_or_default();
909 operations.extend(
910 text_operations
911 .iter()
912 .filter(|(_, op)| !since.observed(op.timestamp()))
913 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
914 );
915 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
916 operations
917 })
918 }
919
920 /// Assign a language to the buffer, returning the buffer.
921 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
922 self.set_language(Some(language), cx);
923 self
924 }
925
926 /// Returns the [`Capability`] of this buffer.
927 pub fn capability(&self) -> Capability {
928 self.capability
929 }
930
931 /// Whether this buffer can only be read.
932 pub fn read_only(&self) -> bool {
933 self.capability == Capability::ReadOnly
934 }
935
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
937 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
938 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
939 let snapshot = buffer.snapshot();
940 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
941 Self {
942 saved_mtime,
943 saved_version: buffer.version(),
944 preview_version: buffer.version(),
945 reload_task: None,
946 transaction_depth: 0,
947 was_dirty_before_starting_transaction: None,
948 has_unsaved_edits: Cell::new((buffer.version(), false)),
949 text: buffer,
950 branch_state: None,
951 file,
952 capability,
953 syntax_map,
954 reparse: None,
955 non_text_state_update_count: 0,
956 sync_parse_timeout: Duration::from_millis(1),
957 parse_status: watch::channel(ParseStatus::Idle),
958 autoindent_requests: Default::default(),
959 wait_for_autoindent_txs: Default::default(),
960 pending_autoindent: Default::default(),
961 language: None,
962 remote_selections: Default::default(),
963 diagnostics: Default::default(),
964 diagnostics_timestamp: Default::default(),
965 completion_triggers: Default::default(),
966 completion_triggers_per_language_server: Default::default(),
967 completion_triggers_timestamp: Default::default(),
968 deferred_ops: OperationQueue::new(),
969 has_conflict: false,
970 change_bits: Default::default(),
971 _subscriptions: Vec::new(),
972 }
973 }
974
975 pub fn build_snapshot(
976 text: Rope,
977 language: Option<Arc<Language>>,
978 language_registry: Option<Arc<LanguageRegistry>>,
979 cx: &mut App,
980 ) -> impl Future<Output = BufferSnapshot> + use<> {
981 let entity_id = cx.reserve_entity::<Self>().entity_id();
982 let buffer_id = entity_id.as_non_zero_u64().into();
983 async move {
984 let text =
985 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
986 let mut syntax = SyntaxMap::new(&text).snapshot();
987 if let Some(language) = language.clone() {
988 let language_registry = language_registry.clone();
989 syntax.reparse(&text, language_registry, language);
990 }
991 BufferSnapshot {
992 text,
993 syntax,
994 file: None,
995 diagnostics: Default::default(),
996 remote_selections: Default::default(),
997 language,
998 non_text_state_update_count: 0,
999 }
1000 }
1001 }
1002
1003 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1004 let entity_id = cx.reserve_entity::<Self>().entity_id();
1005 let buffer_id = entity_id.as_non_zero_u64().into();
1006 let text =
1007 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1008 let syntax = SyntaxMap::new(&text).snapshot();
1009 BufferSnapshot {
1010 text,
1011 syntax,
1012 file: None,
1013 diagnostics: Default::default(),
1014 remote_selections: Default::default(),
1015 language: None,
1016 non_text_state_update_count: 0,
1017 }
1018 }
1019
1020 #[cfg(any(test, feature = "test-support"))]
1021 pub fn build_snapshot_sync(
1022 text: Rope,
1023 language: Option<Arc<Language>>,
1024 language_registry: Option<Arc<LanguageRegistry>>,
1025 cx: &mut App,
1026 ) -> BufferSnapshot {
1027 let entity_id = cx.reserve_entity::<Self>().entity_id();
1028 let buffer_id = entity_id.as_non_zero_u64().into();
1029 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1030 let mut syntax = SyntaxMap::new(&text).snapshot();
1031 if let Some(language) = language.clone() {
1032 syntax.reparse(&text, language_registry, language);
1033 }
1034 BufferSnapshot {
1035 text,
1036 syntax,
1037 file: None,
1038 diagnostics: Default::default(),
1039 remote_selections: Default::default(),
1040 language,
1041 non_text_state_update_count: 0,
1042 }
1043 }
1044
1045 /// Retrieve a snapshot of the buffer's current state. This is computationally
1046 /// cheap, and allows reading from the buffer on a background thread.
1047 pub fn snapshot(&self) -> BufferSnapshot {
1048 let text = self.text.snapshot();
1049 let mut syntax_map = self.syntax_map.lock();
1050 syntax_map.interpolate(&text);
1051 let syntax = syntax_map.snapshot();
1052
1053 BufferSnapshot {
1054 text,
1055 syntax,
1056 file: self.file.clone(),
1057 remote_selections: self.remote_selections.clone(),
1058 diagnostics: self.diagnostics.clone(),
1059 language: self.language.clone(),
1060 non_text_state_update_count: self.non_text_state_update_count,
1061 }
1062 }
1063
1064 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1065 let this = cx.entity();
1066 cx.new(|cx| {
1067 let mut branch = Self {
1068 branch_state: Some(BufferBranchState {
1069 base_buffer: this.clone(),
1070 merged_operations: Default::default(),
1071 }),
1072 language: self.language.clone(),
1073 has_conflict: self.has_conflict,
1074 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1075 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1076 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1077 };
1078 if let Some(language_registry) = self.language_registry() {
1079 branch.set_language_registry(language_registry);
1080 }
1081
1082 // Reparse the branch buffer so that we get syntax highlighting immediately.
1083 branch.reparse(cx);
1084
1085 branch
1086 })
1087 }
1088
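    /// Computes an [`EditPreview`] for a set of proposed edits without applying
    /// them to this buffer, by replaying the edits on a branch of the text on a
    /// background thread and reparsing the result for syntax highlighting.
    ///
    /// Illustrative sketch (not compiled as a doctest); `anchor_range` and the
    /// edit text are hypothetical:
    ///
    /// ```ignore
    /// let edits: Arc<[(Range<Anchor>, String)]> =
    ///     vec![(anchor_range, "new text".to_string())].into();
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let highlighted = preview.highlight_edits(&buffer.read(cx).snapshot(), &edits, true, cx);
    /// ```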
1089 pub fn preview_edits(
1090 &self,
1091 edits: Arc<[(Range<Anchor>, String)]>,
1092 cx: &App,
1093 ) -> Task<EditPreview> {
1094 let registry = self.language_registry();
1095 let language = self.language().cloned();
1096 let old_snapshot = self.text.snapshot();
1097 let mut branch_buffer = self.text.branch();
1098 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1099 cx.background_spawn(async move {
1100 if !edits.is_empty() {
1101 if let Some(language) = language.clone() {
1102 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1103 }
1104
1105 branch_buffer.edit(edits.iter().cloned());
1106 let snapshot = branch_buffer.snapshot();
1107 syntax_snapshot.interpolate(&snapshot);
1108
1109 if let Some(language) = language {
1110 syntax_snapshot.reparse(&snapshot, registry, language);
1111 }
1112 }
1113 EditPreview {
1114 old_snapshot,
1115 applied_edits_snapshot: branch_buffer.snapshot(),
1116 syntax_snapshot,
1117 }
1118 })
1119 }
1120
1121 /// Applies all of the changes in this buffer that intersect any of the
1122 /// given `ranges` to its base buffer.
1123 ///
1124 /// If `ranges` is empty, then all changes will be applied. This buffer must
1125 /// be a branch buffer to call this method.
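    ///
    /// Illustrative sketch (not compiled as a doctest) of the branch/merge
    /// workflow, assuming access to a gpui `cx`:
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// edited on the branch\n")], None, cx);
    ///     // Apply all branch changes back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```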
1126 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1127 let Some(base_buffer) = self.base_buffer() else {
1128 debug_panic!("not a branch buffer");
1129 return;
1130 };
1131
1132 let mut ranges = if ranges.is_empty() {
1133 &[0..usize::MAX]
1134 } else {
1135 ranges.as_slice()
1136 }
1137 .iter()
1138 .peekable();
1139
1140 let mut edits = Vec::new();
1141 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1142 let mut is_included = false;
1143 while let Some(range) = ranges.peek() {
1144 if range.end < edit.new.start {
1145 ranges.next().unwrap();
1146 } else {
1147 if range.start <= edit.new.end {
1148 is_included = true;
1149 }
1150 break;
1151 }
1152 }
1153
1154 if is_included {
1155 edits.push((
1156 edit.old.clone(),
1157 self.text_for_range(edit.new.clone()).collect::<String>(),
1158 ));
1159 }
1160 }
1161
1162 let operation = base_buffer.update(cx, |base_buffer, cx| {
1163 // cx.emit(BufferEvent::DiffBaseChanged);
1164 base_buffer.edit(edits, None, cx)
1165 });
1166
1167 if let Some(operation) = operation
1168 && let Some(BufferBranchState {
1169 merged_operations, ..
1170 }) = &mut self.branch_state
1171 {
1172 merged_operations.push(operation);
1173 }
1174 }
1175
1176 fn on_base_buffer_event(
1177 &mut self,
1178 _: Entity<Buffer>,
1179 event: &BufferEvent,
1180 cx: &mut Context<Self>,
1181 ) {
1182 let BufferEvent::Operation { operation, .. } = event else {
1183 return;
1184 };
1185 let Some(BufferBranchState {
1186 merged_operations, ..
1187 }) = &mut self.branch_state
1188 else {
1189 return;
1190 };
1191
1192 let mut operation_to_undo = None;
1193 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1194 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1195 {
1196 merged_operations.remove(ix);
1197 operation_to_undo = Some(operation.timestamp);
1198 }
1199
1200 self.apply_ops([operation.clone()], cx);
1201
1202 if let Some(timestamp) = operation_to_undo {
1203 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1204 self.undo_operations(counts, cx);
1205 }
1206 }
1207
1208 #[cfg(test)]
1209 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1210 &self.text
1211 }
1212
1213 /// Retrieve a snapshot of the buffer's raw text, without any
1214 /// language-related state like the syntax tree or diagnostics.
1215 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1216 self.text.snapshot()
1217 }
1218
1219 /// The file associated with the buffer, if any.
1220 pub fn file(&self) -> Option<&Arc<dyn File>> {
1221 self.file.as_ref()
1222 }
1223
1224 /// The version of the buffer that was last saved or reloaded from disk.
1225 pub fn saved_version(&self) -> &clock::Global {
1226 &self.saved_version
1227 }
1228
1229 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1230 pub fn saved_mtime(&self) -> Option<MTime> {
1231 self.saved_mtime
1232 }
1233
1234 /// Assign a language to the buffer.
1235 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1236 self.non_text_state_update_count += 1;
1237 self.syntax_map.lock().clear(&self.text);
1238 self.language = language;
1239 self.was_changed();
1240 self.reparse(cx);
1241 cx.emit(BufferEvent::LanguageChanged);
1242 }
1243
1244 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1245 /// other languages if parts of the buffer are written in different languages.
1246 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1247 self.syntax_map
1248 .lock()
1249 .set_language_registry(language_registry);
1250 }
1251
1252 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1253 self.syntax_map.lock().language_registry()
1254 }
1255
1256 /// Assign the line ending type to the buffer.
1257 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1258 self.text.set_line_ending(line_ending);
1259
1260 let lamport_timestamp = self.text.lamport_clock.tick();
1261 self.send_operation(
1262 Operation::UpdateLineEnding {
1263 line_ending,
1264 lamport_timestamp,
1265 },
1266 true,
1267 cx,
1268 );
1269 }
1270
1271 /// Assign the buffer a new [`Capability`].
1272 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1273 if self.capability != capability {
1274 self.capability = capability;
1275 cx.emit(BufferEvent::CapabilityChanged)
1276 }
1277 }
1278
1279 /// This method is called to signal that the buffer has been saved.
1280 pub fn did_save(
1281 &mut self,
1282 version: clock::Global,
1283 mtime: Option<MTime>,
1284 cx: &mut Context<Self>,
1285 ) {
1286 self.saved_version = version;
1287 self.has_unsaved_edits
1288 .set((self.saved_version().clone(), false));
1289 self.has_conflict = false;
1290 self.saved_mtime = mtime;
1291 self.was_changed();
1292 cx.emit(BufferEvent::Saved);
1293 cx.notify();
1294 }
1295
1296 /// Reloads the contents of the buffer from disk.
1297 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1298 let (tx, rx) = futures::channel::oneshot::channel();
1299 let prev_version = self.text.version();
1300 self.reload_task = Some(cx.spawn(async move |this, cx| {
1301 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1302 let file = this.file.as_ref()?.as_local()?;
1303
1304 Some((file.disk_state().mtime(), file.load(cx)))
1305 })?
1306 else {
1307 return Ok(());
1308 };
1309
1310 let new_text = new_text.await?;
1311 let diff = this
1312 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1313 .await;
1314 this.update(cx, |this, cx| {
1315 if this.version() == diff.base_version {
1316 this.finalize_last_transaction();
1317 this.apply_diff(diff, cx);
1318 tx.send(this.finalize_last_transaction().cloned()).ok();
1319 this.has_conflict = false;
1320 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1321 } else {
1322 if !diff.edits.is_empty()
1323 || this
1324 .edits_since::<usize>(&diff.base_version)
1325 .next()
1326 .is_some()
1327 {
1328 this.has_conflict = true;
1329 }
1330
1331 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1332 }
1333
1334 this.reload_task.take();
1335 })
1336 }));
1337 rx
1338 }
1339
1340 /// This method is called to signal that the buffer has been reloaded.
1341 pub fn did_reload(
1342 &mut self,
1343 version: clock::Global,
1344 line_ending: LineEnding,
1345 mtime: Option<MTime>,
1346 cx: &mut Context<Self>,
1347 ) {
1348 self.saved_version = version;
1349 self.has_unsaved_edits
1350 .set((self.saved_version.clone(), false));
1351 self.text.set_line_ending(line_ending);
1352 self.saved_mtime = mtime;
1353 cx.emit(BufferEvent::Reloaded);
1354 cx.notify();
1355 }
1356
1357 /// Updates the [`File`] backing this buffer. This should be called when
1358 /// the file has changed or has been deleted.
1359 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1360 let was_dirty = self.is_dirty();
1361 let mut file_changed = false;
1362
1363 if let Some(old_file) = self.file.as_ref() {
1364 if new_file.path() != old_file.path() {
1365 file_changed = true;
1366 }
1367
1368 let old_state = old_file.disk_state();
1369 let new_state = new_file.disk_state();
1370 if old_state != new_state {
1371 file_changed = true;
1372 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1373 cx.emit(BufferEvent::ReloadNeeded)
1374 }
1375 }
1376 } else {
1377 file_changed = true;
1378 };
1379
1380 self.file = Some(new_file);
1381 if file_changed {
1382 self.was_changed();
1383 self.non_text_state_update_count += 1;
1384 if was_dirty != self.is_dirty() {
1385 cx.emit(BufferEvent::DirtyChanged);
1386 }
1387 cx.emit(BufferEvent::FileHandleChanged);
1388 cx.notify();
1389 }
1390 }
1391
1392 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1393 Some(self.branch_state.as_ref()?.base_buffer.clone())
1394 }
1395
1396 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1397 pub fn language(&self) -> Option<&Arc<Language>> {
1398 self.language.as_ref()
1399 }
1400
1401 /// Returns the [`Language`] at the given location.
1402 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1403 let offset = position.to_offset(self);
1404 let mut is_first = true;
1405 let start_anchor = self.anchor_before(offset);
1406 let end_anchor = self.anchor_after(offset);
1407 self.syntax_map
1408 .lock()
1409 .layers_for_range(offset..offset, &self.text, false)
1410 .filter(|layer| {
1411 if is_first {
1412 is_first = false;
1413 return true;
1414 }
1415
1416 layer
1417 .included_sub_ranges
1418 .map(|sub_ranges| {
1419 sub_ranges.iter().any(|sub_range| {
1420 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1421 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1422 !is_before_start && !is_after_end
1423 })
1424 })
1425 .unwrap_or(true)
1426 })
1427 .last()
1428 .map(|info| info.language.clone())
1429 .or_else(|| self.language.clone())
1430 }
1431
1432 /// Returns each [`Language`] for the active syntax layers at the given location.
1433 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1434 let offset = position.to_offset(self);
1435 let mut languages: Vec<Arc<Language>> = self
1436 .syntax_map
1437 .lock()
1438 .layers_for_range(offset..offset, &self.text, false)
1439 .map(|info| info.language.clone())
1440 .collect();
1441
1442 if languages.is_empty()
1443 && let Some(buffer_language) = self.language()
1444 {
1445 languages.push(buffer_language.clone());
1446 }
1447
1448 languages
1449 }
1450
1451 /// An integer version number that accounts for all updates besides
1452 /// the buffer's text itself (which is versioned via a version vector).
1453 pub fn non_text_state_update_count(&self) -> usize {
1454 self.non_text_state_update_count
1455 }
1456
1457 /// Whether the buffer is being parsed in the background.
1458 #[cfg(any(test, feature = "test-support"))]
1459 pub fn is_parsing(&self) -> bool {
1460 self.reparse.is_some()
1461 }
1462
1463 /// Indicates whether the buffer contains any regions that may be
1464 /// written in a language that hasn't been loaded yet.
1465 pub fn contains_unknown_injections(&self) -> bool {
1466 self.syntax_map.lock().contains_unknown_injections()
1467 }
1468
1469 #[cfg(any(test, feature = "test-support"))]
1470 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1471 self.sync_parse_timeout = timeout;
1472 }
1473
1474 /// Called after an edit to synchronize the buffer's main parse tree with
1475 /// the buffer's new underlying state.
1476 ///
1477 /// Locks the syntax map and interpolates the edits since the last reparse
1478 /// into the foreground syntax tree.
1479 ///
1480 /// Then takes a stable snapshot of the syntax map before unlocking it.
1481 /// The snapshot with the interpolated edits is sent to a background thread,
1482 /// where we ask Tree-sitter to perform an incremental parse.
1483 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that timeout,
    /// we proceed synchronously with the freshly parsed tree.
    ///
    /// If the timeout elapses, we return with the interpolated tree still in
    /// the foreground and spawn a second task that waits for the background
    /// parse to complete. When it does, we call back into the main thread and
    /// assign the parsed state.
1492 ///
1493 /// If the buffer or grammar changed since the start of the background parse,
1494 /// initiate an additional reparse recursively. To avoid concurrent parses
1495 /// for the same buffer, we only initiate a new parse if we are not already
1496 /// parsing in the background.
1497 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1498 if self.reparse.is_some() {
1499 return;
1500 }
1501 let language = if let Some(language) = self.language.clone() {
1502 language
1503 } else {
1504 return;
1505 };
1506
1507 let text = self.text_snapshot();
1508 let parsed_version = self.version();
1509
1510 let mut syntax_map = self.syntax_map.lock();
1511 syntax_map.interpolate(&text);
1512 let language_registry = syntax_map.language_registry();
1513 let mut syntax_snapshot = syntax_map.snapshot();
1514 drop(syntax_map);
1515
1516 let parse_task = cx.background_spawn({
1517 let language = language.clone();
1518 let language_registry = language_registry.clone();
1519 async move {
1520 syntax_snapshot.reparse(&text, language_registry, language);
1521 syntax_snapshot
1522 }
1523 });
1524
1525 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1526 match cx
1527 .background_executor()
1528 .block_with_timeout(self.sync_parse_timeout, parse_task)
1529 {
1530 Ok(new_syntax_snapshot) => {
1531 self.did_finish_parsing(new_syntax_snapshot, cx);
1532 self.reparse = None;
1533 }
1534 Err(parse_task) => {
1535 self.reparse = Some(cx.spawn(async move |this, cx| {
1536 let new_syntax_map = parse_task.await;
1537 this.update(cx, move |this, cx| {
1538 let grammar_changed =
1539 this.language.as_ref().is_none_or(|current_language| {
1540 !Arc::ptr_eq(&language, current_language)
1541 });
1542 let language_registry_changed = new_syntax_map
1543 .contains_unknown_injections()
1544 && language_registry.is_some_and(|registry| {
1545 registry.version() != new_syntax_map.language_registry_version()
1546 });
1547 let parse_again = language_registry_changed
1548 || grammar_changed
1549 || this.version.changed_since(&parsed_version);
1550 this.did_finish_parsing(new_syntax_map, cx);
1551 this.reparse = None;
1552 if parse_again {
1553 this.reparse(cx);
1554 }
1555 })
1556 .ok();
1557 }));
1558 }
1559 }
1560 }
1561
1562 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1563 self.was_changed();
1564 self.non_text_state_update_count += 1;
1565 self.syntax_map.lock().did_parse(syntax_snapshot);
1566 self.request_autoindent(cx);
1567 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1568 cx.emit(BufferEvent::Reparsed);
1569 cx.notify();
1570 }
1571
1572 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1573 self.parse_status.1.clone()
1574 }
1575
1576 /// Assign to the buffer a set of diagnostics created by a given language server.
1577 pub fn update_diagnostics(
1578 &mut self,
1579 server_id: LanguageServerId,
1580 diagnostics: DiagnosticSet,
1581 cx: &mut Context<Self>,
1582 ) {
1583 let lamport_timestamp = self.text.lamport_clock.tick();
1584 let op = Operation::UpdateDiagnostics {
1585 server_id,
1586 diagnostics: diagnostics.iter().cloned().collect(),
1587 lamport_timestamp,
1588 };
1589
1590 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1591 self.send_operation(op, true, cx);
1592 }
1593
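    /// Returns the diagnostics stored for this buffer, either for a single
    /// language server (when `for_server` is `Some`) or for all language servers.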
1594 pub fn buffer_diagnostics(
1595 &self,
1596 for_server: Option<LanguageServerId>,
1597 ) -> Vec<&DiagnosticEntry<Anchor>> {
1598 match for_server {
1599 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1600 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1601 Err(_) => Vec::new(),
1602 },
1603 None => self
1604 .diagnostics
1605 .iter()
1606 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1607 .collect(),
1608 }
1609 }
1610
1611 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1612 if let Some(indent_sizes) = self.compute_autoindents() {
1613 let indent_sizes = cx.background_spawn(indent_sizes);
1614 match cx
1615 .background_executor()
1616 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1617 {
1618 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1619 Err(indent_sizes) => {
1620 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1621 let indent_sizes = indent_sizes.await;
1622 this.update(cx, |this, cx| {
1623 this.apply_autoindents(indent_sizes, cx);
1624 })
1625 .ok();
1626 }));
1627 }
1628 }
1629 } else {
1630 self.autoindent_requests.clear();
1631 for tx in self.wait_for_autoindent_txs.drain(..) {
1632 tx.send(()).ok();
1633 }
1634 }
1635 }
1636
1637 fn compute_autoindents(
1638 &self,
1639 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1640 let max_rows_between_yields = 100;
1641 let snapshot = self.snapshot();
1642 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1643 return None;
1644 }
1645
1646 let autoindent_requests = self.autoindent_requests.clone();
1647 Some(async move {
1648 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1649 for request in autoindent_requests {
1650 // Resolve each edited range to its row in the current buffer and in the
1651 // buffer before this batch of edits.
1652 let mut row_ranges = Vec::new();
1653 let mut old_to_new_rows = BTreeMap::new();
1654 let mut language_indent_sizes_by_new_row = Vec::new();
1655 for entry in &request.entries {
1656 let position = entry.range.start;
1657 let new_row = position.to_point(&snapshot).row;
1658 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1659 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1660
1661 if !entry.first_line_is_new {
1662 let old_row = position.to_point(&request.before_edit).row;
1663 old_to_new_rows.insert(old_row, new_row);
1664 }
1665 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1666 }
1667
1668 // Build a map containing the suggested indentation for each of the edited lines
1669 // with respect to the state of the buffer before these edits. This map is keyed
1670 // by the rows for these lines in the current state of the buffer.
1671 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1672 let old_edited_ranges =
1673 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1674 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1675 let mut language_indent_size = IndentSize::default();
1676 for old_edited_range in old_edited_ranges {
1677 let suggestions = request
1678 .before_edit
1679 .suggest_autoindents(old_edited_range.clone())
1680 .into_iter()
1681 .flatten();
1682 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1683 if let Some(suggestion) = suggestion {
1684 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1685
1686 // Find the indent size based on the language for this row.
1687 while let Some((row, size)) = language_indent_sizes.peek() {
1688 if *row > new_row {
1689 break;
1690 }
1691 language_indent_size = *size;
1692 language_indent_sizes.next();
1693 }
1694
1695 let suggested_indent = old_to_new_rows
1696 .get(&suggestion.basis_row)
1697 .and_then(|from_row| {
1698 Some(old_suggestions.get(from_row).copied()?.0)
1699 })
1700 .unwrap_or_else(|| {
1701 request
1702 .before_edit
1703 .indent_size_for_line(suggestion.basis_row)
1704 })
1705 .with_delta(suggestion.delta, language_indent_size);
1706 old_suggestions
1707 .insert(new_row, (suggested_indent, suggestion.within_error));
1708 }
1709 }
1710 yield_now().await;
1711 }
1712
1713 // Compute new suggestions for each line, but only include them in the result
1714 // if they differ from the old suggestion for that line.
1715 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1716 let mut language_indent_size = IndentSize::default();
1717 for (row_range, original_indent_column) in row_ranges {
1718 let new_edited_row_range = if request.is_block_mode {
1719 row_range.start..row_range.start + 1
1720 } else {
1721 row_range.clone()
1722 };
1723
1724 let suggestions = snapshot
1725 .suggest_autoindents(new_edited_row_range.clone())
1726 .into_iter()
1727 .flatten();
1728 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1729 if let Some(suggestion) = suggestion {
1730 // Find the indent size based on the language for this row.
1731 while let Some((row, size)) = language_indent_sizes.peek() {
1732 if *row > new_row {
1733 break;
1734 }
1735 language_indent_size = *size;
1736 language_indent_sizes.next();
1737 }
1738
1739 let suggested_indent = indent_sizes
1740 .get(&suggestion.basis_row)
1741 .copied()
1742 .map(|e| e.0)
1743 .unwrap_or_else(|| {
1744 snapshot.indent_size_for_line(suggestion.basis_row)
1745 })
1746 .with_delta(suggestion.delta, language_indent_size);
1747
1748 if old_suggestions.get(&new_row).is_none_or(
1749 |(old_indentation, was_within_error)| {
1750 suggested_indent != *old_indentation
1751 && (!suggestion.within_error || *was_within_error)
1752 },
1753 ) {
1754 indent_sizes.insert(
1755 new_row,
1756 (suggested_indent, request.ignore_empty_lines),
1757 );
1758 }
1759 }
1760 }
1761
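                    // In block mode, only the first row of the range receives a fresh
                    // suggestion; shift the remaining rows by the same delta applied to that
                    // first row, so the block keeps its internal indentation.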
1762 if let (true, Some(original_indent_column)) =
1763 (request.is_block_mode, original_indent_column)
1764 {
1765 let new_indent =
1766 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1767 *indent
1768 } else {
1769 snapshot.indent_size_for_line(row_range.start)
1770 };
1771 let delta = new_indent.len as i64 - original_indent_column as i64;
1772 if delta != 0 {
1773 for row in row_range.skip(1) {
1774 indent_sizes.entry(row).or_insert_with(|| {
1775 let mut size = snapshot.indent_size_for_line(row);
1776 if size.kind == new_indent.kind {
1777 match delta.cmp(&0) {
1778 Ordering::Greater => size.len += delta as u32,
1779 Ordering::Less => {
1780 size.len = size.len.saturating_sub(-delta as u32)
1781 }
1782 Ordering::Equal => {}
1783 }
1784 }
1785 (size, request.ignore_empty_lines)
1786 });
1787 }
1788 }
1789 }
1790
1791 yield_now().await;
1792 }
1793 }
1794
1795 indent_sizes
1796 .into_iter()
1797 .filter_map(|(row, (indent, ignore_empty_lines))| {
1798 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1799 None
1800 } else {
1801 Some((row, indent))
1802 }
1803 })
1804 .collect()
1805 })
1806 }
1807
1808 fn apply_autoindents(
1809 &mut self,
1810 indent_sizes: BTreeMap<u32, IndentSize>,
1811 cx: &mut Context<Self>,
1812 ) {
1813 self.autoindent_requests.clear();
1814 for tx in self.wait_for_autoindent_txs.drain(..) {
1815 tx.send(()).ok();
1816 }
1817
1818 let edits: Vec<_> = indent_sizes
1819 .into_iter()
1820 .filter_map(|(row, indent_size)| {
1821 let current_size = indent_size_for_line(self, row);
1822 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1823 })
1824 .collect();
1825
1826 let preserve_preview = self.preserve_preview();
1827 self.edit(edits, None, cx);
1828 if preserve_preview {
1829 self.refresh_preview();
1830 }
1831 }
1832
1833 /// Create a minimal edit that will cause the given row to be indented
1834 /// with the given size. After applying this edit, the length of the line
1835 /// will always be at least `new_size.len`.
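    ///
    /// For example (illustrative): going from a 2-space indent to 4 spaces inserts two
    /// spaces at column 0, going from 4 spaces to 2 deletes columns 0..2, and switching
    /// indent kinds (e.g. spaces to a tab) replaces the entire existing indentation.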
1836 pub fn edit_for_indent_size_adjustment(
1837 row: u32,
1838 current_size: IndentSize,
1839 new_size: IndentSize,
1840 ) -> Option<(Range<Point>, String)> {
1841 if new_size.kind == current_size.kind {
1842 match new_size.len.cmp(&current_size.len) {
1843 Ordering::Greater => {
1844 let point = Point::new(row, 0);
1845 Some((
1846 point..point,
1847 iter::repeat(new_size.char())
1848 .take((new_size.len - current_size.len) as usize)
1849 .collect::<String>(),
1850 ))
1851 }
1852
1853 Ordering::Less => Some((
1854 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1855 String::new(),
1856 )),
1857
1858 Ordering::Equal => None,
1859 }
1860 } else {
1861 Some((
1862 Point::new(row, 0)..Point::new(row, current_size.len),
1863 iter::repeat(new_size.char())
1864 .take(new_size.len as usize)
1865 .collect::<String>(),
1866 ))
1867 }
1868 }
1869
1870 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1871 /// and the given new text.
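    /// The resulting [`Diff`] can later be applied with [`Buffer::apply_diff`].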
1872 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1873 let old_text = self.as_rope().clone();
1874 let base_version = self.version();
1875 cx.background_executor()
1876 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1877 let old_text = old_text.to_string();
1878 let line_ending = LineEnding::detect(&new_text);
1879 LineEnding::normalize(&mut new_text);
1880 let edits = text_diff(&old_text, &new_text);
1881 Diff {
1882 base_version,
1883 line_ending,
1884 edits,
1885 }
1886 })
1887 }
1888
1889 /// Spawns a background task that searches the buffer for any whitespace
1890 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1891 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1892 let old_text = self.as_rope().clone();
1893 let line_ending = self.line_ending();
1894 let base_version = self.version();
1895 cx.background_spawn(async move {
1896 let ranges = trailing_whitespace_ranges(&old_text);
1897 let empty = Arc::<str>::from("");
1898 Diff {
1899 base_version,
1900 line_ending,
1901 edits: ranges
1902 .into_iter()
1903 .map(|range| (range, empty.clone()))
1904 .collect(),
1905 }
1906 })
1907 }
1908
1909 /// Ensures that the buffer ends with a single newline character, and
1910 /// no other whitespace. Skips if the buffer is empty.
1911 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1912 let len = self.len();
1913 if len == 0 {
1914 return;
1915 }
1916 let mut offset = len;
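        // Walk the buffer's chunks from the end, tracking the offset just past the last
        // non-whitespace character. If exactly one newline already follows it, nothing to do.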
1917 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1918 let non_whitespace_len = chunk
1919 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1920 .len();
1921 offset -= chunk.len();
1922 offset += non_whitespace_len;
1923 if non_whitespace_len != 0 {
1924 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1925 return;
1926 }
1927 break;
1928 }
1929 }
1930 self.edit([(offset..len, "\n")], None, cx);
1931 }
1932
1933 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1934 /// calculated, then adjust the diff to account for those changes, and discard any
1935 /// parts of the diff that conflict with those changes.
1936 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1937 let snapshot = self.snapshot();
1938 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
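        // Running offset delta between the diff's base text and the current text, accumulated
        // from edits that occurred entirely before the hunk currently being adjusted.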
1939 let mut delta = 0;
1940 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1941 while let Some(edit_since) = edits_since.peek() {
1942 // If the edit occurs after a diff hunk, then it does not
1943 // affect that hunk.
1944 if edit_since.old.start > range.end {
1945 break;
1946 }
1947 // If the edit precedes the diff hunk, then adjust the hunk
1948 // to reflect the edit.
1949 else if edit_since.old.end < range.start {
1950 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1951 edits_since.next();
1952 }
1953 // If the edit intersects a diff hunk, then discard that hunk.
1954 else {
1955 return None;
1956 }
1957 }
1958
1959 let start = (range.start as i64 + delta) as usize;
1960 let end = (range.end as i64 + delta) as usize;
1961 Some((start..end, new_text))
1962 });
1963
1964 self.start_transaction();
1965 self.text.set_line_ending(diff.line_ending);
1966 self.edit(adjusted_edits, None, cx);
1967 self.end_transaction(cx)
1968 }
1969
1970 fn has_unsaved_edits(&self) -> bool {
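        // Cache of (version, result): when the buffer hasn't changed since the last check,
        // reuse the cached answer instead of recomputing edits since the saved version.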
1971 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1972
1973 if last_version == self.version {
1974 self.has_unsaved_edits
1975 .set((last_version, has_unsaved_edits));
1976 return has_unsaved_edits;
1977 }
1978
1979 let has_edits = self.has_edits_since(&self.saved_version);
1980 self.has_unsaved_edits
1981 .set((self.version.clone(), has_edits));
1982 has_edits
1983 }
1984
1985 /// Checks if the buffer has unsaved changes.
1986 pub fn is_dirty(&self) -> bool {
1987 if self.capability == Capability::ReadOnly {
1988 return false;
1989 }
1990 if self.has_conflict {
1991 return true;
1992 }
1993 match self.file.as_ref().map(|f| f.disk_state()) {
1994 Some(DiskState::New) | Some(DiskState::Deleted) => {
1995 !self.is_empty() && self.has_unsaved_edits()
1996 }
1997 _ => self.has_unsaved_edits(),
1998 }
1999 }
2000
2001 /// Checks if the buffer and its file have both changed since the buffer
2002 /// was last saved or reloaded.
2003 pub fn has_conflict(&self) -> bool {
2004 if self.has_conflict {
2005 return true;
2006 }
2007 let Some(file) = self.file.as_ref() else {
2008 return false;
2009 };
2010 match file.disk_state() {
2011 DiskState::New => false,
2012 DiskState::Present { mtime } => match self.saved_mtime {
2013 Some(saved_mtime) => {
2014 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2015 }
2016 None => true,
2017 },
2018 DiskState::Deleted => false,
2019 }
2020 }
2021
2022 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2023 pub fn subscribe(&mut self) -> Subscription {
2024 self.text.subscribe()
2025 }
2026
2027 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2028 ///
2029 /// This allows downstream code to check if the buffer's text has changed without
2030 /// waiting for an effect cycle, which would be required if using events.
2031 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2032 if let Err(ix) = self
2033 .change_bits
2034 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2035 {
2036 self.change_bits.insert(ix, bit);
2037 }
2038 }
2039
2040 fn was_changed(&mut self) {
2041 self.change_bits.retain(|change_bit| {
2042 change_bit.upgrade().is_some_and(|bit| {
2043 bit.replace(true);
2044 true
2045 })
2046 });
2047 }
2048
2049 /// Starts a transaction, if one is not already in-progress. When undoing or
2050 /// redoing edits, all of the edits performed within a transaction are undone
2051 /// or redone together.
2052 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2053 self.start_transaction_at(Instant::now())
2054 }
2055
2056 /// Starts a transaction, providing the current time. Subsequent transactions
2057 /// that occur within a short period of time will be grouped together. This
2058 /// is controlled by the buffer's undo grouping duration.
2059 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2060 self.transaction_depth += 1;
2061 if self.was_dirty_before_starting_transaction.is_none() {
2062 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2063 }
2064 self.text.start_transaction_at(now)
2065 }
2066
2067 /// Terminates the current transaction, if this is the outermost transaction.
2068 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2069 self.end_transaction_at(Instant::now(), cx)
2070 }
2071
2072 /// Terminates the current transaction, providing the current time. Subsequent transactions
2073 /// that occur within a short period of time will be grouped together. This
2074 /// is controlled by the buffer's undo grouping duration.
2075 pub fn end_transaction_at(
2076 &mut self,
2077 now: Instant,
2078 cx: &mut Context<Self>,
2079 ) -> Option<TransactionId> {
2080 assert!(self.transaction_depth > 0);
2081 self.transaction_depth -= 1;
2082 let was_dirty = if self.transaction_depth == 0 {
2083 self.was_dirty_before_starting_transaction.take().unwrap()
2084 } else {
2085 false
2086 };
2087 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2088 self.did_edit(&start_version, was_dirty, cx);
2089 Some(transaction_id)
2090 } else {
2091 None
2092 }
2093 }
2094
2095 /// Manually add a transaction to the buffer's undo history.
2096 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2097 self.text.push_transaction(transaction, now);
2098 }
2099
2100 /// Differs from `push_transaction` in that it does not clear the redo
2101 /// stack. Intended to be used to create a parent transaction to merge
2102 /// potential child transactions into.
2103 ///
2104 /// The caller is responsible for removing it from the undo history using
2105 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2106 /// are merged into this transaction, the caller is responsible for ensuring
2107 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2108 /// cleared is to create transactions with the usual `start_transaction` and
2109 /// `end_transaction` methods and merging the resulting transactions into
2110 /// the transaction created by this method.
2111 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2112 self.text.push_empty_transaction(now)
2113 }
2114
2115 /// Prevent the last transaction from being grouped with any subsequent transactions,
2116 /// even if they occur within the buffer's undo grouping duration.
2117 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2118 self.text.finalize_last_transaction()
2119 }
2120
2121 /// Manually group all changes since a given transaction.
2122 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2123 self.text.group_until_transaction(transaction_id);
2124 }
2125
2126 /// Manually remove a transaction from the buffer's undo history
2127 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2128 self.text.forget_transaction(transaction_id)
2129 }
2130
2131 /// Retrieve a transaction from the buffer's undo history
2132 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2133 self.text.get_transaction(transaction_id)
2134 }
2135
2136 /// Manually merge two transactions in the buffer's undo history.
2137 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2138 self.text.merge_transactions(transaction, destination);
2139 }
2140
2141 /// Waits for the buffer to receive operations with the given timestamps.
2142 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2143 &mut self,
2144 edit_ids: It,
2145 ) -> impl Future<Output = Result<()>> + use<It> {
2146 self.text.wait_for_edits(edit_ids)
2147 }
2148
2149 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2150 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2151 &mut self,
2152 anchors: It,
2153 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2154 self.text.wait_for_anchors(anchors)
2155 }
2156
2157 /// Waits for the buffer to receive operations up to the given version.
2158 pub fn wait_for_version(
2159 &mut self,
2160 version: clock::Global,
2161 ) -> impl Future<Output = Result<()>> + use<> {
2162 self.text.wait_for_version(version)
2163 }
2164
2165 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2166 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2167 pub fn give_up_waiting(&mut self) {
2168 self.text.give_up_waiting();
2169 }
2170
2171 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2172 let mut rx = None;
2173 if !self.autoindent_requests.is_empty() {
2174 let channel = oneshot::channel();
2175 self.wait_for_autoindent_txs.push(channel.0);
2176 rx = Some(channel.1);
2177 }
2178 rx
2179 }
2180
2181 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2182 pub fn set_active_selections(
2183 &mut self,
2184 selections: Arc<[Selection<Anchor>]>,
2185 line_mode: bool,
2186 cursor_shape: CursorShape,
2187 cx: &mut Context<Self>,
2188 ) {
2189 let lamport_timestamp = self.text.lamport_clock.tick();
2190 self.remote_selections.insert(
2191 self.text.replica_id(),
2192 SelectionSet {
2193 selections: selections.clone(),
2194 lamport_timestamp,
2195 line_mode,
2196 cursor_shape,
2197 },
2198 );
2199 self.send_operation(
2200 Operation::UpdateSelections {
2201 selections,
2202 line_mode,
2203 lamport_timestamp,
2204 cursor_shape,
2205 },
2206 true,
2207 cx,
2208 );
2209 self.non_text_state_update_count += 1;
2210 cx.notify();
2211 }
2212
2213 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2214 /// this replica.
2215 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2216 if self
2217 .remote_selections
2218 .get(&self.text.replica_id())
2219 .is_none_or(|set| !set.selections.is_empty())
2220 {
2221 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2222 }
2223 }
2224
2225 pub fn set_agent_selections(
2226 &mut self,
2227 selections: Arc<[Selection<Anchor>]>,
2228 line_mode: bool,
2229 cursor_shape: CursorShape,
2230 cx: &mut Context<Self>,
2231 ) {
2232 let lamport_timestamp = self.text.lamport_clock.tick();
2233 self.remote_selections.insert(
2234 AGENT_REPLICA_ID,
2235 SelectionSet {
2236 selections,
2237 lamport_timestamp,
2238 line_mode,
2239 cursor_shape,
2240 },
2241 );
2242 self.non_text_state_update_count += 1;
2243 cx.notify();
2244 }
2245
2246 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2247 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2248 }
2249
2250 /// Replaces the buffer's entire text.
2251 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2252 where
2253 T: Into<Arc<str>>,
2254 {
2255 self.autoindent_requests.clear();
2256 self.edit([(0..self.len(), text)], None, cx)
2257 }
2258
2259 /// Appends the given text to the end of the buffer.
2260 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2261 where
2262 T: Into<Arc<str>>,
2263 {
2264 self.edit([(self.len()..self.len(), text)], None, cx)
2265 }
2266
2267 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2268 /// delete, and a string of text to insert at that location.
2269 ///
2270 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2271 /// request for the edited ranges, which will be processed when the buffer finishes
2272 /// parsing.
2273 ///
2274 /// Parsing takes place at the end of a transaction, and may compute synchronously
2275 /// or asynchronously, depending on the changes.
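    ///
    /// For example, `buffer.edit([(0..1, "x"), (5..5, "y")], None, cx)` replaces the first
    /// byte with "x" and inserts "y" at offset 5, all within a single transaction.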
2276 pub fn edit<I, S, T>(
2277 &mut self,
2278 edits_iter: I,
2279 autoindent_mode: Option<AutoindentMode>,
2280 cx: &mut Context<Self>,
2281 ) -> Option<clock::Lamport>
2282 where
2283 I: IntoIterator<Item = (Range<S>, T)>,
2284 S: ToOffset,
2285 T: Into<Arc<str>>,
2286 {
2287 // Skip invalid edits and coalesce contiguous ones.
2288 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2289
2290 for (range, new_text) in edits_iter {
2291 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2292
2293 if range.start > range.end {
2294 mem::swap(&mut range.start, &mut range.end);
2295 }
2296 let new_text = new_text.into();
2297 if !new_text.is_empty() || !range.is_empty() {
2298 if let Some((prev_range, prev_text)) = edits.last_mut()
2299 && prev_range.end >= range.start
2300 {
2301 prev_range.end = cmp::max(prev_range.end, range.end);
2302 *prev_text = format!("{prev_text}{new_text}").into();
2303 } else {
2304 edits.push((range, new_text));
2305 }
2306 }
2307 }
2308 if edits.is_empty() {
2309 return None;
2310 }
2311
2312 self.start_transaction();
2313 self.pending_autoindent.take();
2314 let autoindent_request = autoindent_mode
2315 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2316
2317 let edit_operation = self.text.edit(edits.iter().cloned());
2318 let edit_id = edit_operation.timestamp();
2319
2320 if let Some((before_edit, mode)) = autoindent_request {
2321 let mut delta = 0isize;
2322 let mut previous_setting = None;
2323 let entries: Vec<_> = edits
2324 .into_iter()
2325 .enumerate()
2326 .zip(&edit_operation.as_edit().unwrap().new_text)
2327 .filter(|((_, (range, _)), _)| {
2328 let language = before_edit.language_at(range.start);
2329 let language_id = language.map(|l| l.id());
2330 if let Some((cached_language_id, auto_indent)) = previous_setting
2331 && cached_language_id == language_id
2332 {
2333 auto_indent
2334 } else {
2335 // The auto-indent setting is not present in editorconfigs, hence
2336 // we can avoid passing the file here.
2337 let auto_indent =
2338 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2339 previous_setting = Some((language_id, auto_indent));
2340 auto_indent
2341 }
2342 })
2343 .map(|((ix, (range, _)), new_text)| {
2344 let new_text_length = new_text.len();
2345 let old_start = range.start.to_point(&before_edit);
2346 let new_start = (delta + range.start as isize) as usize;
2347 let range_len = range.end - range.start;
2348 delta += new_text_length as isize - range_len as isize;
2349
2350 // Decide what range of the insertion to auto-indent, and whether
2351 // the first line of the insertion should be considered a newly-inserted line
2352 // or an edit to an existing line.
2353 let mut range_of_insertion_to_indent = 0..new_text_length;
2354 let mut first_line_is_new = true;
2355
2356 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2357 let old_line_end = before_edit.line_len(old_start.row);
2358
2359 if old_start.column > old_line_start {
2360 first_line_is_new = false;
2361 }
2362
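                    // A single-line insertion whose replaced range ends before the end of the
                    // existing line (or that lands on a line containing only indentation)
                    // edits an existing line rather than creating a new one.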
2363 if !new_text.contains('\n')
2364 && (old_start.column + (range_len as u32) < old_line_end
2365 || old_line_end == old_line_start)
2366 {
2367 first_line_is_new = false;
2368 }
2369
2370 // When inserting text starting with a newline, avoid auto-indenting the
2371 // previous line.
2372 if new_text.starts_with('\n') {
2373 range_of_insertion_to_indent.start += 1;
2374 first_line_is_new = true;
2375 }
2376
2377 let mut original_indent_column = None;
2378 if let AutoindentMode::Block {
2379 original_indent_columns,
2380 } = &mode
2381 {
2382 original_indent_column = Some(if new_text.starts_with('\n') {
2383 indent_size_for_text(
2384 new_text[range_of_insertion_to_indent.clone()].chars(),
2385 )
2386 .len
2387 } else {
2388 original_indent_columns
2389 .get(ix)
2390 .copied()
2391 .flatten()
2392 .unwrap_or_else(|| {
2393 indent_size_for_text(
2394 new_text[range_of_insertion_to_indent.clone()].chars(),
2395 )
2396 .len
2397 })
2398 });
2399
2400 // Avoid auto-indenting the line after the edit.
2401 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2402 range_of_insertion_to_indent.end -= 1;
2403 }
2404 }
2405
2406 AutoindentRequestEntry {
2407 first_line_is_new,
2408 original_indent_column,
2409 indent_size: before_edit.language_indent_size_at(range.start, cx),
2410 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2411 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2412 }
2413 })
2414 .collect();
2415
2416 if !entries.is_empty() {
2417 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2418 before_edit,
2419 entries,
2420 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2421 ignore_empty_lines: false,
2422 }));
2423 }
2424 }
2425
2426 self.end_transaction(cx);
2427 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2428 Some(edit_id)
2429 }
2430
2431 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2432 self.was_changed();
2433
2434 if self.edits_since::<usize>(old_version).next().is_none() {
2435 return;
2436 }
2437
2438 self.reparse(cx);
2439 cx.emit(BufferEvent::Edited);
2440 if was_dirty != self.is_dirty() {
2441 cx.emit(BufferEvent::DirtyChanged);
2442 }
2443 cx.notify();
2444 }
2445
2446 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2447 where
2448 I: IntoIterator<Item = Range<T>>,
2449 T: ToOffset + Copy,
2450 {
2451 let before_edit = self.snapshot();
2452 let entries = ranges
2453 .into_iter()
2454 .map(|range| AutoindentRequestEntry {
2455 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2456 first_line_is_new: true,
2457 indent_size: before_edit.language_indent_size_at(range.start, cx),
2458 original_indent_column: None,
2459 })
2460 .collect();
2461 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2462 before_edit,
2463 entries,
2464 is_block_mode: false,
2465 ignore_empty_lines: true,
2466 }));
2467 self.request_autoindent(cx);
2468 }
2469
2470 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2471 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2472 pub fn insert_empty_line(
2473 &mut self,
2474 position: impl ToPoint,
2475 space_above: bool,
2476 space_below: bool,
2477 cx: &mut Context<Self>,
2478 ) -> Point {
2479 let mut position = position.to_point(self);
2480
2481 self.start_transaction();
2482
2483 self.edit(
2484 [(position..position, "\n")],
2485 Some(AutoindentMode::EachLine),
2486 cx,
2487 );
2488
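        // If the cursor was not at the start of a line, the newline just inserted split that
        // line, so the empty line begins on the following row.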
2489 if position.column > 0 {
2490 position += Point::new(1, 0);
2491 }
2492
2493 if !self.is_line_blank(position.row) {
2494 self.edit(
2495 [(position..position, "\n")],
2496 Some(AutoindentMode::EachLine),
2497 cx,
2498 );
2499 }
2500
2501 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2502 self.edit(
2503 [(position..position, "\n")],
2504 Some(AutoindentMode::EachLine),
2505 cx,
2506 );
2507 position.row += 1;
2508 }
2509
2510 if space_below
2511 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2512 {
2513 self.edit(
2514 [(position..position, "\n")],
2515 Some(AutoindentMode::EachLine),
2516 cx,
2517 );
2518 }
2519
2520 self.end_transaction(cx);
2521
2522 position
2523 }
2524
2525 /// Applies the given remote operations to the buffer.
2526 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2527 self.pending_autoindent.take();
2528 let was_dirty = self.is_dirty();
2529 let old_version = self.version.clone();
2530 let mut deferred_ops = Vec::new();
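        // Non-buffer operations that can't be applied yet (e.g. selections or diagnostics
        // whose anchors haven't been received) are deferred and retried after the text
        // operations below are applied.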
2531 let buffer_ops = ops
2532 .into_iter()
2533 .filter_map(|op| match op {
2534 Operation::Buffer(op) => Some(op),
2535 _ => {
2536 if self.can_apply_op(&op) {
2537 self.apply_op(op, cx);
2538 } else {
2539 deferred_ops.push(op);
2540 }
2541 None
2542 }
2543 })
2544 .collect::<Vec<_>>();
2545 for operation in buffer_ops.iter() {
2546 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2547 }
2548 self.text.apply_ops(buffer_ops);
2549 self.deferred_ops.insert(deferred_ops);
2550 self.flush_deferred_ops(cx);
2551 self.did_edit(&old_version, was_dirty, cx);
2552 // Notify independently of whether the buffer was edited as the operations could include a
2553 // selection update.
2554 cx.notify();
2555 }
2556
2557 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2558 let mut deferred_ops = Vec::new();
2559 for op in self.deferred_ops.drain().iter().cloned() {
2560 if self.can_apply_op(&op) {
2561 self.apply_op(op, cx);
2562 } else {
2563 deferred_ops.push(op);
2564 }
2565 }
2566 self.deferred_ops.insert(deferred_ops);
2567 }
2568
2569 pub fn has_deferred_ops(&self) -> bool {
2570 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2571 }
2572
2573 fn can_apply_op(&self, operation: &Operation) -> bool {
2574 match operation {
2575 Operation::Buffer(_) => {
2576 unreachable!("buffer operations should never be applied at this layer")
2577 }
2578 Operation::UpdateDiagnostics {
2579 diagnostics: diagnostic_set,
2580 ..
2581 } => diagnostic_set.iter().all(|diagnostic| {
2582 self.text.can_resolve(&diagnostic.range.start)
2583 && self.text.can_resolve(&diagnostic.range.end)
2584 }),
2585 Operation::UpdateSelections { selections, .. } => selections
2586 .iter()
2587 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2588 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2589 }
2590 }
2591
2592 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2593 match operation {
2594 Operation::Buffer(_) => {
2595 unreachable!("buffer operations should never be applied at this layer")
2596 }
2597 Operation::UpdateDiagnostics {
2598 server_id,
2599 diagnostics: diagnostic_set,
2600 lamport_timestamp,
2601 } => {
2602 let snapshot = self.snapshot();
2603 self.apply_diagnostic_update(
2604 server_id,
2605 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2606 lamport_timestamp,
2607 cx,
2608 );
2609 }
2610 Operation::UpdateSelections {
2611 selections,
2612 lamport_timestamp,
2613 line_mode,
2614 cursor_shape,
2615 } => {
2616 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2617 && set.lamport_timestamp > lamport_timestamp
2618 {
2619 return;
2620 }
2621
2622 self.remote_selections.insert(
2623 lamport_timestamp.replica_id,
2624 SelectionSet {
2625 selections,
2626 lamport_timestamp,
2627 line_mode,
2628 cursor_shape,
2629 },
2630 );
2631 self.text.lamport_clock.observe(lamport_timestamp);
2632 self.non_text_state_update_count += 1;
2633 }
2634 Operation::UpdateCompletionTriggers {
2635 triggers,
2636 lamport_timestamp,
2637 server_id,
2638 } => {
2639 if triggers.is_empty() {
2640 self.completion_triggers_per_language_server
2641 .remove(&server_id);
2642 self.completion_triggers = self
2643 .completion_triggers_per_language_server
2644 .values()
2645 .flat_map(|triggers| triggers.iter().cloned())
2646 .collect();
2647 } else {
2648 self.completion_triggers_per_language_server
2649 .insert(server_id, triggers.iter().cloned().collect());
2650 self.completion_triggers.extend(triggers);
2651 }
2652 self.text.lamport_clock.observe(lamport_timestamp);
2653 }
2654 Operation::UpdateLineEnding {
2655 line_ending,
2656 lamport_timestamp,
2657 } => {
2658 self.text.set_line_ending(line_ending);
2659 self.text.lamport_clock.observe(lamport_timestamp);
2660 }
2661 }
2662 }
2663
2664 fn apply_diagnostic_update(
2665 &mut self,
2666 server_id: LanguageServerId,
2667 diagnostics: DiagnosticSet,
2668 lamport_timestamp: clock::Lamport,
2669 cx: &mut Context<Self>,
2670 ) {
2671 if lamport_timestamp > self.diagnostics_timestamp {
2672 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2673 if diagnostics.is_empty() {
2674 if let Ok(ix) = ix {
2675 self.diagnostics.remove(ix);
2676 }
2677 } else {
2678 match ix {
2679 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2680 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2681 };
2682 }
2683 self.diagnostics_timestamp = lamport_timestamp;
2684 self.non_text_state_update_count += 1;
2685 self.text.lamport_clock.observe(lamport_timestamp);
2686 cx.notify();
2687 cx.emit(BufferEvent::DiagnosticsUpdated);
2688 }
2689 }
2690
2691 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2692 self.was_changed();
2693 cx.emit(BufferEvent::Operation {
2694 operation,
2695 is_local,
2696 });
2697 }
2698
2699 /// Removes the selections for a given peer.
2700 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2701 self.remote_selections.remove(&replica_id);
2702 cx.notify();
2703 }
2704
2705 /// Undoes the most recent transaction.
2706 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2707 let was_dirty = self.is_dirty();
2708 let old_version = self.version.clone();
2709
2710 if let Some((transaction_id, operation)) = self.text.undo() {
2711 self.send_operation(Operation::Buffer(operation), true, cx);
2712 self.did_edit(&old_version, was_dirty, cx);
2713 Some(transaction_id)
2714 } else {
2715 None
2716 }
2717 }
2718
2719 /// Manually undoes a specific transaction in the buffer's undo history.
2720 pub fn undo_transaction(
2721 &mut self,
2722 transaction_id: TransactionId,
2723 cx: &mut Context<Self>,
2724 ) -> bool {
2725 let was_dirty = self.is_dirty();
2726 let old_version = self.version.clone();
2727 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2728 self.send_operation(Operation::Buffer(operation), true, cx);
2729 self.did_edit(&old_version, was_dirty, cx);
2730 true
2731 } else {
2732 false
2733 }
2734 }
2735
2736 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2737 pub fn undo_to_transaction(
2738 &mut self,
2739 transaction_id: TransactionId,
2740 cx: &mut Context<Self>,
2741 ) -> bool {
2742 let was_dirty = self.is_dirty();
2743 let old_version = self.version.clone();
2744
2745 let operations = self.text.undo_to_transaction(transaction_id);
2746 let undone = !operations.is_empty();
2747 for operation in operations {
2748 self.send_operation(Operation::Buffer(operation), true, cx);
2749 }
2750 if undone {
2751 self.did_edit(&old_version, was_dirty, cx)
2752 }
2753 undone
2754 }
2755
2756 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2757 let was_dirty = self.is_dirty();
2758 let operation = self.text.undo_operations(counts);
2759 let old_version = self.version.clone();
2760 self.send_operation(Operation::Buffer(operation), true, cx);
2761 self.did_edit(&old_version, was_dirty, cx);
2762 }
2763
2764 /// Redoes the most recently undone transaction.
2765 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2766 let was_dirty = self.is_dirty();
2767 let old_version = self.version.clone();
2768
2769 if let Some((transaction_id, operation)) = self.text.redo() {
2770 self.send_operation(Operation::Buffer(operation), true, cx);
2771 self.did_edit(&old_version, was_dirty, cx);
2772 Some(transaction_id)
2773 } else {
2774 None
2775 }
2776 }
2777
2778 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2779 pub fn redo_to_transaction(
2780 &mut self,
2781 transaction_id: TransactionId,
2782 cx: &mut Context<Self>,
2783 ) -> bool {
2784 let was_dirty = self.is_dirty();
2785 let old_version = self.version.clone();
2786
2787 let operations = self.text.redo_to_transaction(transaction_id);
2788 let redone = !operations.is_empty();
2789 for operation in operations {
2790 self.send_operation(Operation::Buffer(operation), true, cx);
2791 }
2792 if redone {
2793 self.did_edit(&old_version, was_dirty, cx)
2794 }
2795 redone
2796 }
2797
2798 /// Override current completion triggers with the user-provided completion triggers.
2799 pub fn set_completion_triggers(
2800 &mut self,
2801 server_id: LanguageServerId,
2802 triggers: BTreeSet<String>,
2803 cx: &mut Context<Self>,
2804 ) {
2805 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2806 if triggers.is_empty() {
2807 self.completion_triggers_per_language_server
2808 .remove(&server_id);
2809 self.completion_triggers = self
2810 .completion_triggers_per_language_server
2811 .values()
2812 .flat_map(|triggers| triggers.iter().cloned())
2813 .collect();
2814 } else {
2815 self.completion_triggers_per_language_server
2816 .insert(server_id, triggers.clone());
2817 self.completion_triggers.extend(triggers.iter().cloned());
2818 }
2819 self.send_operation(
2820 Operation::UpdateCompletionTriggers {
2821 triggers: triggers.into_iter().collect(),
2822 lamport_timestamp: self.completion_triggers_timestamp,
2823 server_id,
2824 },
2825 true,
2826 cx,
2827 );
2828 cx.notify();
2829 }
2830
2831 /// Returns a list of strings which trigger a completion menu for this language.
2832 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2833 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2834 &self.completion_triggers
2835 }
2836
2837 /// Call this directly after performing edits to prevent the preview tab
2838 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2839 /// to return false until there are additional edits.
2840 pub fn refresh_preview(&mut self) {
2841 self.preview_version = self.version.clone();
2842 }
2843
2844 /// Whether we should preserve the preview status of a tab containing this buffer.
2845 pub fn preserve_preview(&self) -> bool {
2846 !self.has_edits_since(&self.preview_version)
2847 }
2848}
2849
2850#[doc(hidden)]
2851#[cfg(any(test, feature = "test-support"))]
2852impl Buffer {
2853 pub fn edit_via_marked_text(
2854 &mut self,
2855 marked_string: &str,
2856 autoindent_mode: Option<AutoindentMode>,
2857 cx: &mut Context<Self>,
2858 ) {
2859 let edits = self.edits_for_marked_text(marked_string);
2860 self.edit(edits, autoindent_mode, cx);
2861 }
2862
2863 pub fn set_group_interval(&mut self, group_interval: Duration) {
2864 self.text.set_group_interval(group_interval);
2865 }
2866
2867 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2868 where
2869 T: rand::Rng,
2870 {
2871 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2872 let mut last_end = None;
2873 for _ in 0..old_range_count {
2874 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2875 break;
2876 }
2877
2878 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2879 let mut range = self.random_byte_range(new_start, rng);
2880 if rng.random_bool(0.2) {
2881 mem::swap(&mut range.start, &mut range.end);
2882 }
2883 last_end = Some(range.end);
2884
2885 let new_text_len = rng.random_range(0..10);
2886 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2887 new_text = new_text.to_uppercase();
2888
2889 edits.push((range, new_text));
2890 }
2891 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2892 self.edit(edits, None, cx);
2893 }
2894
2895 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2896 let was_dirty = self.is_dirty();
2897 let old_version = self.version.clone();
2898
2899 let ops = self.text.randomly_undo_redo(rng);
2900 if !ops.is_empty() {
2901 for op in ops {
2902 self.send_operation(Operation::Buffer(op), true, cx);
2903 self.did_edit(&old_version, was_dirty, cx);
2904 }
2905 }
2906 }
2907}
2908
2909impl EventEmitter<BufferEvent> for Buffer {}
2910
2911impl Deref for Buffer {
2912 type Target = TextBuffer;
2913
2914 fn deref(&self) -> &Self::Target {
2915 &self.text
2916 }
2917}
2918
2919impl BufferSnapshot {
2920 /// Returns the [`IndentSize`] of the given line, based on its current
2921 /// leading whitespace.
2922 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2923 indent_size_for_line(self, row)
2924 }
2925
2926 /// Returns [`IndentSize`] for a given position that respects user settings
2927 /// and language preferences.
2928 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2929 let settings = language_settings(
2930 self.language_at(position).map(|l| l.name()),
2931 self.file(),
2932 cx,
2933 );
2934 if settings.hard_tabs {
2935 IndentSize::tab()
2936 } else {
2937 IndentSize::spaces(settings.tab_size.get())
2938 }
2939 }
2940
2941 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2942 /// is passed in as `single_indent_size`.
2943 pub fn suggested_indents(
2944 &self,
2945 rows: impl Iterator<Item = u32>,
2946 single_indent_size: IndentSize,
2947 ) -> BTreeMap<u32, IndentSize> {
2948 let mut result = BTreeMap::new();
2949
2950 for row_range in contiguous_ranges(rows, 10) {
2951 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2952 Some(suggestions) => suggestions,
2953 _ => break,
2954 };
2955
2956 for (row, suggestion) in row_range.zip(suggestions) {
2957 let indent_size = if let Some(suggestion) = suggestion {
2958 result
2959 .get(&suggestion.basis_row)
2960 .copied()
2961 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2962 .with_delta(suggestion.delta, single_indent_size)
2963 } else {
2964 self.indent_size_for_line(row)
2965 };
2966
2967 result.insert(row, indent_size);
2968 }
2969 }
2970
2971 result
2972 }
2973
2974 fn suggest_autoindents(
2975 &self,
2976 row_range: Range<u32>,
2977 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2978 let config = &self.language.as_ref()?.config;
2979 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2980
2981 #[derive(Debug, Clone)]
2982 struct StartPosition {
2983 start: Point,
2984 suffix: SharedString,
2985 }
2986
2987 // Find the suggested indentation ranges based on the syntax tree.
2988 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2989 let end = Point::new(row_range.end, 0);
2990 let range = (start..end).to_offset(&self.text);
2991 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2992 Some(&grammar.indents_config.as_ref()?.query)
2993 });
2994 let indent_configs = matches
2995 .grammars()
2996 .iter()
2997 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2998 .collect::<Vec<_>>();
2999
3000 let mut indent_ranges = Vec::<Range<Point>>::new();
3001 let mut start_positions = Vec::<StartPosition>::new();
3002 let mut outdent_positions = Vec::<Point>::new();
3003 while let Some(mat) = matches.peek() {
3004 let mut start: Option<Point> = None;
3005 let mut end: Option<Point> = None;
3006
3007 let config = indent_configs[mat.grammar_index];
3008 for capture in mat.captures {
3009 if capture.index == config.indent_capture_ix {
3010 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3011 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3012 } else if Some(capture.index) == config.start_capture_ix {
3013 start = Some(Point::from_ts_point(capture.node.end_position()));
3014 } else if Some(capture.index) == config.end_capture_ix {
3015 end = Some(Point::from_ts_point(capture.node.start_position()));
3016 } else if Some(capture.index) == config.outdent_capture_ix {
3017 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3018 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3019 start_positions.push(StartPosition {
3020 start: Point::from_ts_point(capture.node.start_position()),
3021 suffix: suffix.clone(),
3022 });
3023 }
3024 }
3025
3026 matches.advance();
3027 if let Some((start, end)) = start.zip(end) {
3028 if start.row == end.row {
3029 continue;
3030 }
3031 let range = start..end;
3032 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3033 Err(ix) => indent_ranges.insert(ix, range),
3034 Ok(ix) => {
3035 let prev_range = &mut indent_ranges[ix];
3036 prev_range.end = prev_range.end.max(range.end);
3037 }
3038 }
3039 }
3040 }
3041
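        // Collect the ranges covered by syntax errors so that suggestions computed inside
        // them can be flagged as `within_error` and treated as less reliable.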
3042 let mut error_ranges = Vec::<Range<Point>>::new();
3043 let mut matches = self
3044 .syntax
3045 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3046 while let Some(mat) = matches.peek() {
3047 let node = mat.captures[0].node;
3048 let start = Point::from_ts_point(node.start_position());
3049 let end = Point::from_ts_point(node.end_position());
3050 let range = start..end;
3051 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3052 Ok(ix) | Err(ix) => ix,
3053 };
3054 let mut end_ix = ix;
3055 while let Some(existing_range) = error_ranges.get(end_ix) {
3056 if existing_range.end < end {
3057 end_ix += 1;
3058 } else {
3059 break;
3060 }
3061 }
3062 error_ranges.splice(ix..end_ix, [range]);
3063 matches.advance();
3064 }
3065
3066 outdent_positions.sort();
3067 for outdent_position in outdent_positions {
3068 // Find the innermost indent range containing this outdent position,
3069 // and set its end to the outdent position.
3070 if let Some(range_to_truncate) = indent_ranges
3071 .iter_mut()
3072 .filter(|indent_range| indent_range.contains(&outdent_position))
3073 .next_back()
3074 {
3075 range_to_truncate.end = outdent_position;
3076 }
3077 }
3078
3079 start_positions.sort_by_key(|b| b.start);
3080
3081 // Find the suggested indentation increases and decreases based on regexes.
3082 let mut regex_outdent_map = HashMap::default();
3083 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3084 let mut start_positions_iter = start_positions.iter().peekable();
3085
3086 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3087 self.for_each_line(
3088 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3089 ..Point::new(row_range.end, 0),
3090 |row, line| {
3091 if config
3092 .decrease_indent_pattern
3093 .as_ref()
3094 .is_some_and(|regex| regex.is_match(line))
3095 {
3096 indent_change_rows.push((row, Ordering::Less));
3097 }
3098 if config
3099 .increase_indent_pattern
3100 .as_ref()
3101 .is_some_and(|regex| regex.is_match(line))
3102 {
3103 indent_change_rows.push((row + 1, Ordering::Greater));
3104 }
3105 while let Some(pos) = start_positions_iter.peek() {
3106 if pos.start.row < row {
3107 let pos = start_positions_iter.next().unwrap();
3108 last_seen_suffix
3109 .entry(pos.suffix.to_string())
3110 .or_default()
3111 .push(pos.start);
3112 } else {
3113 break;
3114 }
3115 }
3116 for rule in &config.decrease_indent_patterns {
3117 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3118 let row_start_column = self.indent_size_for_line(row).len;
3119 let basis_row = rule
3120 .valid_after
3121 .iter()
3122 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3123 .flatten()
3124 .filter(|start_point| start_point.column <= row_start_column)
3125 .max_by_key(|start_point| start_point.row);
3126 if let Some(outdent_to_row) = basis_row {
3127 regex_outdent_map.insert(row, outdent_to_row.row);
3128 }
3129 break;
3130 }
3131 }
3132 },
3133 );
3134
3135 let mut indent_changes = indent_change_rows.into_iter().peekable();
3136 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3137 prev_non_blank_row.unwrap_or(0)
3138 } else {
3139 row_range.start.saturating_sub(1)
3140 };
3141
3142 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3143 Some(row_range.map(move |row| {
3144 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3145
3146 let mut indent_from_prev_row = false;
3147 let mut outdent_from_prev_row = false;
3148 let mut outdent_to_row = u32::MAX;
3149 let mut from_regex = false;
3150
3151 while let Some((indent_row, delta)) = indent_changes.peek() {
3152 match indent_row.cmp(&row) {
3153 Ordering::Equal => match delta {
3154 Ordering::Less => {
3155 from_regex = true;
3156 outdent_from_prev_row = true
3157 }
3158 Ordering::Greater => {
3159 indent_from_prev_row = true;
3160 from_regex = true
3161 }
3162 _ => {}
3163 },
3164
3165 Ordering::Greater => break,
3166 Ordering::Less => {}
3167 }
3168
3169 indent_changes.next();
3170 }
3171
3172 for range in &indent_ranges {
3173 if range.start.row >= row {
3174 break;
3175 }
3176 if range.start.row == prev_row && range.end > row_start {
3177 indent_from_prev_row = true;
3178 }
3179 if range.end > prev_row_start && range.end <= row_start {
3180 outdent_to_row = outdent_to_row.min(range.start.row);
3181 }
3182 }
3183
3184 if let Some(basis_row) = regex_outdent_map.get(&row) {
3185 indent_from_prev_row = false;
3186 outdent_to_row = *basis_row;
3187 from_regex = true;
3188 }
3189
3190 let within_error = error_ranges
3191 .iter()
3192 .any(|e| e.start.row < row && e.end > row_start);
3193
3194 let suggestion = if outdent_to_row == prev_row
3195 || (outdent_from_prev_row && indent_from_prev_row)
3196 {
3197 Some(IndentSuggestion {
3198 basis_row: prev_row,
3199 delta: Ordering::Equal,
3200 within_error: within_error && !from_regex,
3201 })
3202 } else if indent_from_prev_row {
3203 Some(IndentSuggestion {
3204 basis_row: prev_row,
3205 delta: Ordering::Greater,
3206 within_error: within_error && !from_regex,
3207 })
3208 } else if outdent_to_row < prev_row {
3209 Some(IndentSuggestion {
3210 basis_row: outdent_to_row,
3211 delta: Ordering::Equal,
3212 within_error: within_error && !from_regex,
3213 })
3214 } else if outdent_from_prev_row {
3215 Some(IndentSuggestion {
3216 basis_row: prev_row,
3217 delta: Ordering::Less,
3218 within_error: within_error && !from_regex,
3219 })
3220 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3221 {
3222 Some(IndentSuggestion {
3223 basis_row: prev_row,
3224 delta: Ordering::Equal,
3225 within_error: within_error && !from_regex,
3226 })
3227 } else {
3228 None
3229 };
3230
3231 prev_row = row;
3232 prev_row_start = row_start;
3233 suggestion
3234 }))
3235 }
3236
3237 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3238 while row > 0 {
3239 row -= 1;
3240 if !self.is_line_blank(row) {
3241 return Some(row);
3242 }
3243 }
3244 None
3245 }
3246
3247 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3248 let captures = self.syntax.captures(range, &self.text, |grammar| {
3249 grammar.highlights_query.as_ref()
3250 });
3251 let highlight_maps = captures
3252 .grammars()
3253 .iter()
3254 .map(|grammar| grammar.highlight_map())
3255 .collect();
3256 (captures, highlight_maps)
3257 }
3258
3259 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3260 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3261 /// returned in chunks where each chunk has a single syntax highlighting style and
3262 /// diagnostic status.
3263 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3264 let range = range.start.to_offset(self)..range.end.to_offset(self);
3265
3266 let mut syntax = None;
3267 if language_aware {
3268 syntax = Some(self.get_highlights(range.clone()));
3269 }
3270 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3271 let diagnostics = language_aware;
3272 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3273 }
3274
3275 pub fn highlighted_text_for_range<T: ToOffset>(
3276 &self,
3277 range: Range<T>,
3278 override_style: Option<HighlightStyle>,
3279 syntax_theme: &SyntaxTheme,
3280 ) -> HighlightedText {
3281 HighlightedText::from_buffer_range(
3282 range,
3283 &self.text,
3284 &self.syntax,
3285 override_style,
3286 syntax_theme,
3287 )
3288 }
3289
3290 /// Invokes the given callback for each line of text in the given range of the buffer.
3291 /// Uses callback to avoid allocating a string for each line.
3292 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3293 let mut line = String::new();
3294 let mut row = range.start.row;
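        // Chaining a trailing "\n" ensures the final line is flushed through the callback
        // even when the range does not end with a newline.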
3295 for chunk in self
3296 .as_rope()
3297 .chunks_in_range(range.to_offset(self))
3298 .chain(["\n"])
3299 {
3300 for (newline_ix, text) in chunk.split('\n').enumerate() {
3301 if newline_ix > 0 {
3302 callback(row, &line);
3303 row += 1;
3304 line.clear();
3305 }
3306 line.push_str(text);
3307 }
3308 }
3309 }
3310
3311 /// Iterates over every [`SyntaxLayer`] in the buffer.
3312 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3313 self.syntax_layers_for_range(0..self.len(), true)
3314 }
3315
3316 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3317 let offset = position.to_offset(self);
3318 self.syntax_layers_for_range(offset..offset, false)
3319 .filter(|l| l.node().end_byte() > offset)
3320 .last()
3321 }
3322
3323 pub fn syntax_layers_for_range<D: ToOffset>(
3324 &self,
3325 range: Range<D>,
3326 include_hidden: bool,
3327 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3328 self.syntax
3329 .layers_for_range(range, &self.text, include_hidden)
3330 }
3331
3332 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3333 &self,
3334 range: Range<D>,
3335 ) -> Option<SyntaxLayer<'_>> {
3336 let range = range.to_offset(self);
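        // Pick the deepest layer intersecting the range; break ties by preferring the layer
        // whose node most tightly encloses it (latest start, then earliest end).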
3337 self.syntax
3338 .layers_for_range(range, &self.text, false)
3339 .max_by(|a, b| {
3340 if a.depth != b.depth {
3341 a.depth.cmp(&b.depth)
3342 } else if a.offset.0 != b.offset.0 {
3343 a.offset.0.cmp(&b.offset.0)
3344 } else {
3345 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3346 }
3347 })
3348 }
3349
3350 /// Returns the main [`Language`].
3351 pub fn language(&self) -> Option<&Arc<Language>> {
3352 self.language.as_ref()
3353 }
3354
3355 /// Returns the [`Language`] at the given location.
3356 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3357 self.syntax_layer_at(position)
3358 .map(|info| info.language)
3359 .or(self.language.as_ref())
3360 }
3361
3362 /// Returns the settings for the language at the given location.
3363 pub fn settings_at<'a, D: ToOffset>(
3364 &'a self,
3365 position: D,
3366 cx: &'a App,
3367 ) -> Cow<'a, LanguageSettings> {
3368 language_settings(
3369 self.language_at(position).map(|l| l.name()),
3370 self.file.as_ref(),
3371 cx,
3372 )
3373 }
3374
3375 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3376 CharClassifier::new(self.language_scope_at(point))
3377 }
3378
3379 /// Returns the [`LanguageScope`] at the given location.
3380 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3381 let offset = position.to_offset(self);
3382 let mut scope = None;
3383 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3384
3385 // Use the layer that has the smallest node intersecting the given point.
3386 for layer in self
3387 .syntax
3388 .layers_for_range(offset..offset, &self.text, false)
3389 {
3390 let mut cursor = layer.node().walk();
3391
3392 let mut range = None;
3393 loop {
3394 let child_range = cursor.node().byte_range();
3395 if !child_range.contains(&offset) {
3396 break;
3397 }
3398
3399 range = Some(child_range);
3400 if cursor.goto_first_child_for_byte(offset).is_none() {
3401 break;
3402 }
3403 }
3404
3405 if let Some(range) = range
3406 && smallest_range_and_depth.as_ref().is_none_or(
3407 |(smallest_range, smallest_range_depth)| {
3408 if layer.depth > *smallest_range_depth {
3409 true
3410 } else if layer.depth == *smallest_range_depth {
3411 range.len() < smallest_range.len()
3412 } else {
3413 false
3414 }
3415 },
3416 )
3417 {
3418 smallest_range_and_depth = Some((range, layer.depth));
3419 scope = Some(LanguageScope {
3420 language: layer.language.clone(),
3421 override_id: layer.override_id(offset, &self.text),
3422 });
3423 }
3424 }
3425
3426 scope.or_else(|| {
3427 self.language.clone().map(|language| LanguageScope {
3428 language,
3429 override_id: None,
3430 })
3431 })
3432 }
3433
3434 /// Returns a tuple of the range and character kind of the word
3435 /// surrounding the given position.
3436 pub fn surrounding_word<T: ToOffset>(
3437 &self,
3438 start: T,
3439 for_completion: bool,
3440 ) -> (Range<usize>, Option<CharKind>) {
3441 let mut start = start.to_offset(self);
3442 let mut end = start;
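        // Scan at most 128 characters on either side of the position when classifying the
        // surrounding word, so the scan stays bounded on very long lines.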
3443 let mut next_chars = self.chars_at(start).take(128).peekable();
3444 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3445
3446 let classifier = self
3447 .char_classifier_at(start)
3448 .for_completion(for_completion);
3449 let word_kind = cmp::max(
3450 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3451 next_chars.peek().copied().map(|c| classifier.kind(c)),
3452 );
3453
3454 for ch in prev_chars {
3455 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3456 start -= ch.len_utf8();
3457 } else {
3458 break;
3459 }
3460 }
3461
3462 for ch in next_chars {
3463 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3464 end += ch.len_utf8();
3465 } else {
3466 break;
3467 }
3468 }
3469
3470 (start..end, word_kind)
3471 }
3472
3473 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3474 /// range. When `require_larger` is true, the node found must be larger than the query range.
3475 ///
3476 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3477 /// be moved to the root of the tree.
3478 fn goto_node_enclosing_range(
3479 cursor: &mut tree_sitter::TreeCursor,
3480 query_range: &Range<usize>,
3481 require_larger: bool,
3482 ) -> bool {
3483 let mut ascending = false;
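        // Descend toward the smallest node that encloses the range; once no node at the
        // current depth encloses it, start ascending and return the first ancestor that does.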
3484 loop {
3485 let mut range = cursor.node().byte_range();
3486 if query_range.is_empty() {
3487 // When the query range is empty and the current node starts after it, move to the
3488 // previous sibling to find the containing node.
3489 if range.start > query_range.start {
3490 cursor.goto_previous_sibling();
3491 range = cursor.node().byte_range();
3492 }
3493 } else {
3494 // When the query range is non-empty and the current node ends exactly at the start,
3495 // move to the next sibling to find a node that extends beyond the start.
3496 if range.end == query_range.start {
3497 cursor.goto_next_sibling();
3498 range = cursor.node().byte_range();
3499 }
3500 }
3501
3502 let encloses = range.contains_inclusive(query_range)
3503 && (!require_larger || range.len() > query_range.len());
3504 if !encloses {
3505 ascending = true;
3506 if !cursor.goto_parent() {
3507 return false;
3508 }
3509 continue;
3510 } else if ascending {
3511 return true;
3512 }
3513
3514 // Descend into the current node.
3515 if cursor
3516 .goto_first_child_for_byte(query_range.start)
3517 .is_none()
3518 {
3519 return true;
3520 }
3521 }
3522 }
3523
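    /// Returns the smallest syntax node that encloses the given range and is larger than it,
    /// considering every syntax layer that intersects the range.
    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a parsed
    /// `BufferSnapshot` and `selection` is a byte range:
    ///
    /// ```ignore
    /// // Expand a selection outward to the nearest enclosing syntax node.
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded = node.byte_range();
    ///     debug_assert!(expanded.start <= selection.start && selection.end <= expanded.end);
    /// }
    /// ```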
3524 pub fn syntax_ancestor<'a, T: ToOffset>(
3525 &'a self,
3526 range: Range<T>,
3527 ) -> Option<tree_sitter::Node<'a>> {
3528 let range = range.start.to_offset(self)..range.end.to_offset(self);
3529 let mut result: Option<tree_sitter::Node<'a>> = None;
3530 for layer in self
3531 .syntax
3532 .layers_for_range(range.clone(), &self.text, true)
3533 {
3534 let mut cursor = layer.node().walk();
3535
3536 // Find the node that both contains the range and is larger than it.
3537 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3538 continue;
3539 }
3540
3541 let left_node = cursor.node();
3542 let mut layer_result = left_node;
3543
3544 // For an empty range, try to find another node immediately to the right of the range.
3545 if left_node.end_byte() == range.start {
3546 let mut right_node = None;
3547 while !cursor.goto_next_sibling() {
3548 if !cursor.goto_parent() {
3549 break;
3550 }
3551 }
3552
3553 while cursor.node().start_byte() == range.start {
3554 right_node = Some(cursor.node());
3555 if !cursor.goto_first_child() {
3556 break;
3557 }
3558 }
3559
3560 // If there is a candidate node on both sides of the (empty) range, then
3561 // decide between the two by favoring a named node over an anonymous token.
3562 // If both nodes are the same in that regard, favor the right one.
3563 if let Some(right_node) = right_node
3564 && (right_node.is_named() || !left_node.is_named())
3565 {
3566 layer_result = right_node;
3567 }
3568 }
3569
3570 if let Some(previous_result) = &result
3571 && previous_result.byte_range().len() < layer_result.byte_range().len()
3572 {
3573 continue;
3574 }
3575 result = Some(layer_result);
3576 }
3577
3578 result
3579 }
3580
3581 /// Find the previous sibling syntax node at the given range.
3582 ///
3583 /// This function locates the syntax node that precedes the node containing
3584 /// the given range. It searches hierarchically by:
3585 /// 1. Finding the node that contains the given range
3586 /// 2. Looking for the previous sibling at the same tree level
3587 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3588 ///
3589 /// Returns `None` if there is no previous sibling at any ancestor level.
3590 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3591 &'a self,
3592 range: Range<T>,
3593 ) -> Option<tree_sitter::Node<'a>> {
3594 let range = range.start.to_offset(self)..range.end.to_offset(self);
3595 let mut result: Option<tree_sitter::Node<'a>> = None;
3596
3597 for layer in self
3598 .syntax
3599 .layers_for_range(range.clone(), &self.text, true)
3600 {
3601 let mut cursor = layer.node().walk();
3602
3603 // Find the node that contains the range
3604 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3605 continue;
3606 }
3607
3608 // Look for the previous sibling, moving up ancestor levels if needed
3609 loop {
3610 if cursor.goto_previous_sibling() {
3611 let layer_result = cursor.node();
3612
3613 if let Some(previous_result) = &result {
3614 if previous_result.byte_range().end < layer_result.byte_range().end {
3615 continue;
3616 }
3617 }
3618 result = Some(layer_result);
3619 break;
3620 }
3621
3622 // No sibling found at this level, try moving up to parent
3623 if !cursor.goto_parent() {
3624 break;
3625 }
3626 }
3627 }
3628
3629 result
3630 }
3631
3632 /// Find the next sibling syntax node at the given range.
3633 ///
3634 /// This function locates the syntax node that follows the node containing
3635 /// the given range. It searches hierarchically by:
3636 /// 1. Finding the node that contains the given range
3637 /// 2. Looking for the next sibling at the same tree level
3638 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3639 ///
3640 /// Returns `None` if there is no next sibling at any ancestor level.
3641 pub fn syntax_next_sibling<'a, T: ToOffset>(
3642 &'a self,
3643 range: Range<T>,
3644 ) -> Option<tree_sitter::Node<'a>> {
3645 let range = range.start.to_offset(self)..range.end.to_offset(self);
3646 let mut result: Option<tree_sitter::Node<'a>> = None;
3647
3648 for layer in self
3649 .syntax
3650 .layers_for_range(range.clone(), &self.text, true)
3651 {
3652 let mut cursor = layer.node().walk();
3653
3654 // Find the node that contains the range
3655 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3656 continue;
3657 }
3658
3659 // Look for the next sibling, moving up ancestor levels if needed
3660 loop {
3661 if cursor.goto_next_sibling() {
3662 let layer_result = cursor.node();
3663
3664 if let Some(previous_result) = &result {
3665 if previous_result.byte_range().start > layer_result.byte_range().start {
3666 continue;
3667 }
3668 }
3669 result = Some(layer_result);
3670 break;
3671 }
3672
3673 // No sibling found at this level, try moving up to parent
3674 if !cursor.goto_parent() {
3675 break;
3676 }
3677 }
3678 }
3679
3680 result
3681 }
3682
3683 /// Returns the root syntax node within the given row
3684 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3685 let start_offset = position.to_offset(self);
3686
3687 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3688
3689 let layer = self
3690 .syntax
3691 .layers_for_range(start_offset..start_offset, &self.text, true)
3692 .next()?;
3693
3694 let mut cursor = layer.node().walk();
3695
3696 // Descend to the first leaf that touches the start of the range.
3697 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3698 if cursor.node().end_byte() == start_offset {
3699 cursor.goto_next_sibling();
3700 }
3701 }
3702
3703 // Ascend to the root node within the same row.
3704 while cursor.goto_parent() {
3705 if cursor.node().start_position().row != row {
3706 break;
3707 }
3708 }
3709
3710 Some(cursor.node())
3711 }
3712
3713 /// Returns the outline for the buffer.
3714 ///
3715 /// This method allows passing an optional [`SyntaxTheme`] to
3716 /// syntax-highlight the returned symbols.
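    ///
    /// A minimal sketch (not a runnable doctest) of consuming the outline items this
    /// method is built on, assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Print an indented listing of the buffer's symbols, without highlighting.
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```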
3717 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3718 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3719 }
3720
3721 /// Returns all the symbols that contain the given position.
3722 ///
3723 /// This method allows passing an optional [`SyntaxTheme`] to
3724 /// syntax-highlight the returned symbols.
3725 pub fn symbols_containing<T: ToOffset>(
3726 &self,
3727 position: T,
3728 theme: Option<&SyntaxTheme>,
3729 ) -> Vec<OutlineItem<Anchor>> {
3730 let position = position.to_offset(self);
3731 let mut items = self.outline_items_containing(
3732 position.saturating_sub(1)..self.len().min(position + 1),
3733 false,
3734 theme,
3735 );
3736 let mut prev_depth = None;
3737 items.retain(|item| {
3738 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3739 prev_depth = Some(item.depth);
3740 result
3741 });
3742 items
3743 }
3744
3745 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3746 let range = range.to_offset(self);
3747 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3748 grammar.outline_config.as_ref().map(|c| &c.query)
3749 });
3750 let configs = matches
3751 .grammars()
3752 .iter()
3753 .map(|g| g.outline_config.as_ref().unwrap())
3754 .collect::<Vec<_>>();
3755
3756 while let Some(mat) = matches.peek() {
3757 let config = &configs[mat.grammar_index];
3758 let containing_item_node = maybe!({
3759 let item_node = mat.captures.iter().find_map(|cap| {
3760 if cap.index == config.item_capture_ix {
3761 Some(cap.node)
3762 } else {
3763 None
3764 }
3765 })?;
3766
3767 let item_byte_range = item_node.byte_range();
3768 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3769 None
3770 } else {
3771 Some(item_node)
3772 }
3773 });
3774
3775 if let Some(item_node) = containing_item_node {
3776 return Some(
3777 Point::from_ts_point(item_node.start_position())
3778 ..Point::from_ts_point(item_node.end_position()),
3779 );
3780 }
3781
3782 matches.advance();
3783 }
3784 None
3785 }
3786
3787 pub fn outline_items_containing<T: ToOffset>(
3788 &self,
3789 range: Range<T>,
3790 include_extra_context: bool,
3791 theme: Option<&SyntaxTheme>,
3792 ) -> Vec<OutlineItem<Anchor>> {
3793 let range = range.to_offset(self);
3794 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3795 grammar.outline_config.as_ref().map(|c| &c.query)
3796 });
3797
3798 let mut items = Vec::new();
3799 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3800 while let Some(mat) = matches.peek() {
3801 let config = matches.grammars()[mat.grammar_index]
3802 .outline_config
3803 .as_ref()
3804 .unwrap();
3805 if let Some(item) =
3806 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3807 {
3808 items.push(item);
3809 } else if let Some(capture) = mat
3810 .captures
3811 .iter()
3812 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3813 {
3814 let capture_range = capture.node.start_position()..capture.node.end_position();
3815 let mut capture_row_range =
3816 capture_range.start.row as u32..capture_range.end.row as u32;
3817 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3818 {
3819 capture_row_range.end -= 1;
3820 }
3821 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3822 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3823 last_row_range.end = capture_row_range.end;
3824 } else {
3825 annotation_row_ranges.push(capture_row_range);
3826 }
3827 } else {
3828 annotation_row_ranges.push(capture_row_range);
3829 }
3830 }
3831 matches.advance();
3832 }
3833
3834 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3835
3836 // Assign depths based on containment relationships and convert to anchors.
3837 let mut item_ends_stack = Vec::<Point>::new();
3838 let mut anchor_items = Vec::new();
3839 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3840 for item in items {
3841 while let Some(last_end) = item_ends_stack.last().copied() {
3842 if last_end < item.range.end {
3843 item_ends_stack.pop();
3844 } else {
3845 break;
3846 }
3847 }
3848
3849 let mut annotation_row_range = None;
3850 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3851 let row_preceding_item = item.range.start.row.saturating_sub(1);
3852 if next_annotation_row_range.end < row_preceding_item {
3853 annotation_row_ranges.next();
3854 } else {
3855 if next_annotation_row_range.end == row_preceding_item {
3856 annotation_row_range = Some(next_annotation_row_range.clone());
3857 annotation_row_ranges.next();
3858 }
3859 break;
3860 }
3861 }
3862
3863 anchor_items.push(OutlineItem {
3864 depth: item_ends_stack.len(),
3865 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3866 text: item.text,
3867 highlight_ranges: item.highlight_ranges,
3868 name_ranges: item.name_ranges,
3869 signature_range: item
3870 .signature_range
3871 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3872 body_range: item
3873 .body_range
3874 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3875 annotation_range: annotation_row_range.map(|annotation_range| {
3876 self.anchor_after(Point::new(annotation_range.start, 0))
3877 ..self.anchor_before(Point::new(
3878 annotation_range.end,
3879 self.line_len(annotation_range.end),
3880 ))
3881 }),
3882 });
3883 item_ends_stack.push(item.range.end);
3884 }
3885
3886 anchor_items
3887 }
3888
3889 fn next_outline_item(
3890 &self,
3891 config: &OutlineConfig,
3892 mat: &SyntaxMapMatch,
3893 range: &Range<usize>,
3894 include_extra_context: bool,
3895 theme: Option<&SyntaxTheme>,
3896 ) -> Option<OutlineItem<Point>> {
3897 let item_node = mat.captures.iter().find_map(|cap| {
3898 if cap.index == config.item_capture_ix {
3899 Some(cap.node)
3900 } else {
3901 None
3902 }
3903 })?;
3904
3905 let item_byte_range = item_node.byte_range();
3906 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3907 return None;
3908 }
3909 let item_point_range = Point::from_ts_point(item_node.start_position())
3910 ..Point::from_ts_point(item_node.end_position());
3911
3912 let mut open_point = None;
3913 let mut close_point = None;
3914
3915 let mut signature_start = None;
3916 let mut signature_end = None;
3917 let mut extend_signature_range = |node: tree_sitter::Node| {
3918 if signature_start.is_none() {
3919 signature_start = Some(Point::from_ts_point(node.start_position()));
3920 }
3921 signature_end = Some(Point::from_ts_point(node.end_position()));
3922 };
3923
3924 let mut buffer_ranges = Vec::new();
3925 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3926 let mut range = node.start_byte()..node.end_byte();
3927 let start = node.start_position();
3928 if node.end_position().row > start.row {
3929 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3930 }
3931
3932 if !range.is_empty() {
3933 buffer_ranges.push((range, node_is_name));
3934 }
3935 };
3936
3937 for capture in mat.captures {
3938 if capture.index == config.name_capture_ix {
3939 add_to_buffer_ranges(capture.node, true);
3940 extend_signature_range(capture.node);
3941 } else if Some(capture.index) == config.context_capture_ix
3942 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3943 {
3944 add_to_buffer_ranges(capture.node, false);
3945 extend_signature_range(capture.node);
3946 } else {
3947 if Some(capture.index) == config.open_capture_ix {
3948 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3949 } else if Some(capture.index) == config.close_capture_ix {
3950 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3951 }
3952 }
3953 }
3954
3955 if buffer_ranges.is_empty() {
3956 return None;
3957 }
3958
3959 let mut text = String::new();
3960 let mut highlight_ranges = Vec::new();
3961 let mut name_ranges = Vec::new();
3962 let mut chunks = self.chunks(
3963 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3964 true,
3965 );
3966 let mut last_buffer_range_end = 0;
3967 for (buffer_range, is_name) in buffer_ranges {
3968 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3969 if space_added {
3970 text.push(' ');
3971 }
3972 let before_append_len = text.len();
3973 let mut offset = buffer_range.start;
3974 chunks.seek(buffer_range.clone());
3975 for mut chunk in chunks.by_ref() {
3976 if chunk.text.len() > buffer_range.end - offset {
3977 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3978 offset = buffer_range.end;
3979 } else {
3980 offset += chunk.text.len();
3981 }
3982 let style = chunk
3983 .syntax_highlight_id
3984 .zip(theme)
3985 .and_then(|(highlight, theme)| highlight.style(theme));
3986 if let Some(style) = style {
3987 let start = text.len();
3988 let end = start + chunk.text.len();
3989 highlight_ranges.push((start..end, style));
3990 }
3991 text.push_str(chunk.text);
3992 if offset >= buffer_range.end {
3993 break;
3994 }
3995 }
3996 if is_name {
3997 let after_append_len = text.len();
3998 let start = if space_added && !name_ranges.is_empty() {
3999 before_append_len - 1
4000 } else {
4001 before_append_len
4002 };
4003 name_ranges.push(start..after_append_len);
4004 }
4005 last_buffer_range_end = buffer_range.end;
4006 }
4007
4008 let signature_range = signature_start
4009 .zip(signature_end)
4010 .map(|(start, end)| start..end);
4011
4012 Some(OutlineItem {
4013 depth: 0, // We'll calculate the depth later
4014 range: item_point_range,
4015 text,
4016 highlight_ranges,
4017 name_ranges,
4018 signature_range,
4019 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4020 annotation_range: None,
4021 })
4022 }
4023
4024 pub fn function_body_fold_ranges<T: ToOffset>(
4025 &self,
4026 within: Range<T>,
4027 ) -> impl Iterator<Item = Range<usize>> + '_ {
4028 self.text_object_ranges(within, TreeSitterOptions::default())
4029 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4030 }
4031
4032 /// For each grammar in the language, runs the provided
4033 /// [`tree_sitter::Query`] against the given range.
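    ///
    /// A minimal sketch (not a runnable doctest) of the peek/advance protocol for consuming
    /// the returned matches, assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.brackets_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _node_range = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```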
4034 pub fn matches(
4035 &self,
4036 range: Range<usize>,
4037 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4038 ) -> SyntaxMapMatches<'_> {
4039 self.syntax.matches(range, self, query)
4040 }
4041
4042 pub fn all_bracket_ranges(
4043 &self,
4044 range: Range<usize>,
4045 ) -> impl Iterator<Item = BracketMatch> + '_ {
4046 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4047 grammar.brackets_config.as_ref().map(|c| &c.query)
4048 });
4049 let configs = matches
4050 .grammars()
4051 .iter()
4052 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4053 .collect::<Vec<_>>();
4054
4055 iter::from_fn(move || {
4056 while let Some(mat) = matches.peek() {
4057 let mut open = None;
4058 let mut close = None;
4059 let config = &configs[mat.grammar_index];
4060 let pattern = &config.patterns[mat.pattern_index];
4061 for capture in mat.captures {
4062 if capture.index == config.open_capture_ix {
4063 open = Some(capture.node.byte_range());
4064 } else if capture.index == config.close_capture_ix {
4065 close = Some(capture.node.byte_range());
4066 }
4067 }
4068
4069 matches.advance();
4070
4071 let Some((open_range, close_range)) = open.zip(close) else {
4072 continue;
4073 };
4074
4075 let bracket_range = open_range.start..=close_range.end;
4076 if !bracket_range.overlaps(&range) {
4077 continue;
4078 }
4079
4080 return Some(BracketMatch {
4081 open_range,
4082 close_range,
4083 newline_only: pattern.newline_only,
4084 });
4085 }
4086 None
4087 })
4088 }
4089
4090 /// Returns bracket range pairs overlapping or adjacent to `range`
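    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a `BufferSnapshot`
    /// whose text is `"(a)"` and whose language's brackets query pairs parentheses:
    ///
    /// ```ignore
    /// // The empty range 1..1 sits between the brackets, which are adjacent to it.
    /// let pair = snapshot.bracket_ranges(1..1).next().unwrap();
    /// assert_eq!(pair.open_range, 0..1);
    /// assert_eq!(pair.close_range, 2..3);
    /// ```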
4091 pub fn bracket_ranges<T: ToOffset>(
4092 &self,
4093 range: Range<T>,
4094 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one byte in each direction so that bracket pairs adjacent
        // to the given range are also matched.
4096 let range = range.start.to_offset(self).saturating_sub(1)
4097 ..self.len().min(range.end.to_offset(self) + 1);
4098 self.all_bracket_ranges(range)
4099 .filter(|pair| !pair.newline_only)
4100 }
4101
4102 pub fn debug_variables_query<T: ToOffset>(
4103 &self,
4104 range: Range<T>,
4105 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4106 let range = range.start.to_offset(self).saturating_sub(1)
4107 ..self.len().min(range.end.to_offset(self) + 1);
4108
4109 let mut matches = self.syntax.matches_with_options(
4110 range.clone(),
4111 &self.text,
4112 TreeSitterOptions::default(),
4113 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4114 );
4115
4116 let configs = matches
4117 .grammars()
4118 .iter()
4119 .map(|grammar| grammar.debug_variables_config.as_ref())
4120 .collect::<Vec<_>>();
4121
4122 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4123
4124 iter::from_fn(move || {
4125 loop {
4126 while let Some(capture) = captures.pop() {
4127 if capture.0.overlaps(&range) {
4128 return Some(capture);
4129 }
4130 }
4131
4132 let mat = matches.peek()?;
4133
4134 let Some(config) = configs[mat.grammar_index].as_ref() else {
4135 matches.advance();
4136 continue;
4137 };
4138
4139 for capture in mat.captures {
4140 let Some(ix) = config
4141 .objects_by_capture_ix
4142 .binary_search_by_key(&capture.index, |e| e.0)
4143 .ok()
4144 else {
4145 continue;
4146 };
4147 let text_object = config.objects_by_capture_ix[ix].1;
4148 let byte_range = capture.node.byte_range();
4149
4150 let mut found = false;
4151 for (range, existing) in captures.iter_mut() {
4152 if existing == &text_object {
4153 range.start = range.start.min(byte_range.start);
4154 range.end = range.end.max(byte_range.end);
4155 found = true;
4156 break;
4157 }
4158 }
4159
4160 if !found {
4161 captures.push((byte_range, text_object));
4162 }
4163 }
4164
4165 matches.advance();
4166 }
4167 })
4168 }
4169
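    /// Returns the byte ranges of text objects (as defined by the language's text object
    /// queries) that intersect the given range, along with the kind of each text object.
    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a parsed
    /// `BufferSnapshot` and `selection` is a byte range inside a function:
    ///
    /// ```ignore
    /// // Find the body of the function surrounding the selection.
    /// let function_body = snapshot
    ///     .text_object_ranges(selection, TreeSitterOptions::default())
    ///     .find_map(|(range, object)| (object == TextObject::InsideFunction).then_some(range));
    /// ```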
4170 pub fn text_object_ranges<T: ToOffset>(
4171 &self,
4172 range: Range<T>,
4173 options: TreeSitterOptions,
4174 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4175 let range = range.start.to_offset(self).saturating_sub(1)
4176 ..self.len().min(range.end.to_offset(self) + 1);
4177
4178 let mut matches =
4179 self.syntax
4180 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4181 grammar.text_object_config.as_ref().map(|c| &c.query)
4182 });
4183
4184 let configs = matches
4185 .grammars()
4186 .iter()
4187 .map(|grammar| grammar.text_object_config.as_ref())
4188 .collect::<Vec<_>>();
4189
4190 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4191
4192 iter::from_fn(move || {
4193 loop {
4194 while let Some(capture) = captures.pop() {
4195 if capture.0.overlaps(&range) {
4196 return Some(capture);
4197 }
4198 }
4199
4200 let mat = matches.peek()?;
4201
4202 let Some(config) = configs[mat.grammar_index].as_ref() else {
4203 matches.advance();
4204 continue;
4205 };
4206
4207 for capture in mat.captures {
4208 let Some(ix) = config
4209 .text_objects_by_capture_ix
4210 .binary_search_by_key(&capture.index, |e| e.0)
4211 .ok()
4212 else {
4213 continue;
4214 };
4215 let text_object = config.text_objects_by_capture_ix[ix].1;
4216 let byte_range = capture.node.byte_range();
4217
4218 let mut found = false;
4219 for (range, existing) in captures.iter_mut() {
4220 if existing == &text_object {
4221 range.start = range.start.min(byte_range.start);
4222 range.end = range.end.max(byte_range.end);
4223 found = true;
4224 break;
4225 }
4226 }
4227
4228 if !found {
4229 captures.push((byte_range, text_object));
4230 }
4231 }
4232
4233 matches.advance();
4234 }
4235 })
4236 }
4237
4238 /// Returns enclosing bracket ranges containing the given range
4239 pub fn enclosing_bracket_ranges<T: ToOffset>(
4240 &self,
4241 range: Range<T>,
4242 ) -> impl Iterator<Item = BracketMatch> + '_ {
4243 let range = range.start.to_offset(self)..range.end.to_offset(self);
4244
4245 self.bracket_ranges(range.clone()).filter(move |pair| {
4246 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4247 })
4248 }
4249
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if
    /// no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
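    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a parsed
    /// `BufferSnapshot` and `selection` is a byte range:
    ///
    /// ```ignore
    /// // Keep only pairs whose brackets do not touch the selection itself.
    /// let filter = |open: Range<usize>, close: Range<usize>| {
    ///     open.end < selection.start && selection.end < close.start
    /// };
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(selection.clone(), Some(&filter));
    /// ```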
4253 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4254 &self,
4255 range: Range<T>,
4256 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4257 ) -> Option<(Range<usize>, Range<usize>)> {
4258 let range = range.start.to_offset(self)..range.end.to_offset(self);
4259
4260 // Get the ranges of the innermost pair of brackets.
4261 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4262
4263 for pair in self.enclosing_bracket_ranges(range) {
4264 if let Some(range_filter) = range_filter
4265 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4266 {
4267 continue;
4268 }
4269
4270 let len = pair.close_range.end - pair.open_range.start;
4271
4272 if let Some((existing_open, existing_close)) = &result {
4273 let existing_len = existing_close.end - existing_open.start;
4274 if len > existing_len {
4275 continue;
4276 }
4277 }
4278
4279 result = Some((pair.open_range, pair.close_range));
4280 }
4281
4282 result
4283 }
4284
4285 /// Returns anchor ranges for any matches of the redaction query.
4286 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4287 /// will be run on the relevant section of the buffer.
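    ///
    /// A minimal sketch (not a runnable doctest) of masking redacted spans, assuming
    /// `snapshot` is a `BufferSnapshot` whose language defines a redaction query:
    ///
    /// ```ignore
    /// let mut masked = snapshot.text();
    /// let mut ranges: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// // Replace from back to front so earlier byte offsets stay valid.
    /// ranges.sort_by_key(|range| std::cmp::Reverse(range.start));
    /// for range in ranges {
    ///     masked.replace_range(range, "[REDACTED]");
    /// }
    /// ```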
4288 pub fn redacted_ranges<T: ToOffset>(
4289 &self,
4290 range: Range<T>,
4291 ) -> impl Iterator<Item = Range<usize>> + '_ {
4292 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4293 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4294 grammar
4295 .redactions_config
4296 .as_ref()
4297 .map(|config| &config.query)
4298 });
4299
4300 let configs = syntax_matches
4301 .grammars()
4302 .iter()
4303 .map(|grammar| grammar.redactions_config.as_ref())
4304 .collect::<Vec<_>>();
4305
4306 iter::from_fn(move || {
4307 let redacted_range = syntax_matches
4308 .peek()
4309 .and_then(|mat| {
4310 configs[mat.grammar_index].and_then(|config| {
4311 mat.captures
4312 .iter()
4313 .find(|capture| capture.index == config.redaction_capture_ix)
4314 })
4315 })
4316 .map(|mat| mat.node.byte_range());
4317 syntax_matches.advance();
4318 redacted_range
4319 })
4320 }
4321
4322 pub fn injections_intersecting_range<T: ToOffset>(
4323 &self,
4324 range: Range<T>,
4325 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4326 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4327
4328 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4329 grammar
4330 .injection_config
4331 .as_ref()
4332 .map(|config| &config.query)
4333 });
4334
4335 let configs = syntax_matches
4336 .grammars()
4337 .iter()
4338 .map(|grammar| grammar.injection_config.as_ref())
4339 .collect::<Vec<_>>();
4340
4341 iter::from_fn(move || {
4342 let ranges = syntax_matches.peek().and_then(|mat| {
4343 let config = &configs[mat.grammar_index]?;
4344 let content_capture_range = mat.captures.iter().find_map(|capture| {
4345 if capture.index == config.content_capture_ix {
4346 Some(capture.node.byte_range())
4347 } else {
4348 None
4349 }
4350 })?;
4351 let language = self.language_at(content_capture_range.start)?;
4352 Some((content_capture_range, language))
4353 });
4354 syntax_matches.advance();
4355 ranges
4356 })
4357 }
4358
4359 pub fn runnable_ranges(
4360 &self,
4361 offset_range: Range<usize>,
4362 ) -> impl Iterator<Item = RunnableRange> + '_ {
4363 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4364 grammar.runnable_config.as_ref().map(|config| &config.query)
4365 });
4366
4367 let test_configs = syntax_matches
4368 .grammars()
4369 .iter()
4370 .map(|grammar| grammar.runnable_config.as_ref())
4371 .collect::<Vec<_>>();
4372
4373 iter::from_fn(move || {
4374 loop {
4375 let mat = syntax_matches.peek()?;
4376
4377 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4378 let mut run_range = None;
4379 let full_range = mat.captures.iter().fold(
4380 Range {
4381 start: usize::MAX,
4382 end: 0,
4383 },
4384 |mut acc, next| {
4385 let byte_range = next.node.byte_range();
4386 if acc.start > byte_range.start {
4387 acc.start = byte_range.start;
4388 }
4389 if acc.end < byte_range.end {
4390 acc.end = byte_range.end;
4391 }
4392 acc
4393 },
4394 );
4395 if full_range.start > full_range.end {
4396 // We did not find a full spanning range of this match.
4397 return None;
4398 }
4399 let extra_captures: SmallVec<[_; 1]> =
4400 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4401 test_configs
4402 .extra_captures
4403 .get(capture.index as usize)
4404 .cloned()
4405 .and_then(|tag_name| match tag_name {
4406 RunnableCapture::Named(name) => {
4407 Some((capture.node.byte_range(), name))
4408 }
4409 RunnableCapture::Run => {
4410 let _ = run_range.insert(capture.node.byte_range());
4411 None
4412 }
4413 })
4414 }));
4415 let run_range = run_range?;
4416 let tags = test_configs
4417 .query
4418 .property_settings(mat.pattern_index)
4419 .iter()
4420 .filter_map(|property| {
4421 if *property.key == *"tag" {
4422 property
4423 .value
4424 .as_ref()
4425 .map(|value| RunnableTag(value.to_string().into()))
4426 } else {
4427 None
4428 }
4429 })
4430 .collect();
4431 let extra_captures = extra_captures
4432 .into_iter()
4433 .map(|(range, name)| {
4434 (
4435 name.to_string(),
4436 self.text_for_range(range).collect::<String>(),
4437 )
4438 })
4439 .collect();
4440 // All tags should have the same range.
4441 Some(RunnableRange {
4442 run_range,
4443 full_range,
4444 runnable: Runnable {
4445 tags,
4446 language: mat.language,
4447 buffer: self.remote_id(),
4448 },
4449 extra_captures,
4450 buffer_id: self.remote_id(),
4451 })
4452 });
4453
4454 syntax_matches.advance();
4455 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want
                    // to end this iterator just because one match lacked a run marker, so in that
                    // case we simply loop around to the next match.
4458 return test_range;
4459 }
4460 }
4461 })
4462 }
4463
4464 /// Returns selections for remote peers intersecting the given range.
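    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a `BufferSnapshot`
    /// in a collaborative session:
    ///
    /// ```ignore
    /// let everything = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, _line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(everything, false)
    /// {
    ///     println!("peer {:?}: {} selections ({:?})", replica_id, selections.count(), cursor_shape);
    /// }
    /// ```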
4465 #[allow(clippy::type_complexity)]
4466 pub fn selections_in_range(
4467 &self,
4468 range: Range<Anchor>,
4469 include_local: bool,
4470 ) -> impl Iterator<
4471 Item = (
4472 ReplicaId,
4473 bool,
4474 CursorShape,
4475 impl Iterator<Item = &Selection<Anchor>> + '_,
4476 ),
4477 > + '_ {
4478 self.remote_selections
4479 .iter()
4480 .filter(move |(replica_id, set)| {
4481 (include_local || **replica_id != self.text.replica_id())
4482 && !set.selections.is_empty()
4483 })
4484 .map(move |(replica_id, set)| {
4485 let start_ix = match set.selections.binary_search_by(|probe| {
4486 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4487 }) {
4488 Ok(ix) | Err(ix) => ix,
4489 };
4490 let end_ix = match set.selections.binary_search_by(|probe| {
4491 probe.start.cmp(&range.end, self).then(Ordering::Less)
4492 }) {
4493 Ok(ix) | Err(ix) => ix,
4494 };
4495
4496 (
4497 *replica_id,
4498 set.line_mode,
4499 set.cursor_shape,
4500 set.selections[start_ix..end_ix].iter(),
4501 )
4502 })
4503 }
4504
    /// Returns whether the buffer contains any diagnostics.
4506 pub fn has_diagnostics(&self) -> bool {
4507 !self.diagnostics.is_empty()
4508 }
4509
4510 /// Returns all the diagnostics intersecting the given range.
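    ///
    /// A minimal sketch (not a runnable doctest) of collecting the errors within the first
    /// ten rows, assuming `snapshot` is a `BufferSnapshot` with diagnostics:
    ///
    /// ```ignore
    /// let errors: Vec<DiagnosticEntry<Point>> = snapshot
    ///     .diagnostics_in_range(Point::new(0, 0)..Point::new(10, 0), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```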
4511 pub fn diagnostics_in_range<'a, T, O>(
4512 &'a self,
4513 search_range: Range<T>,
4514 reversed: bool,
4515 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4516 where
4517 T: 'a + Clone + ToOffset,
4518 O: 'a + FromAnchor,
4519 {
4520 let mut iterators: Vec<_> = self
4521 .diagnostics
4522 .iter()
4523 .map(|(_, collection)| {
4524 collection
4525 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4526 .peekable()
4527 })
4528 .collect();
4529
4530 std::iter::from_fn(move || {
4531 let (next_ix, _) = iterators
4532 .iter_mut()
4533 .enumerate()
4534 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4535 .min_by(|(_, a), (_, b)| {
4536 let cmp = a
4537 .range
4538 .start
4539 .cmp(&b.range.start, self)
4540 // when range is equal, sort by diagnostic severity
4541 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4542 // and stabilize order with group_id
4543 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4544 if reversed { cmp.reverse() } else { cmp }
4545 })?;
4546 iterators[next_ix]
4547 .next()
4548 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4549 diagnostic,
4550 range: FromAnchor::from_anchor(&range.start, self)
4551 ..FromAnchor::from_anchor(&range.end, self),
4552 })
4553 })
4554 }
4555
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4559 pub fn diagnostic_groups(
4560 &self,
4561 language_server_id: Option<LanguageServerId>,
4562 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4563 let mut groups = Vec::new();
4564
4565 if let Some(language_server_id) = language_server_id {
4566 if let Ok(ix) = self
4567 .diagnostics
4568 .binary_search_by_key(&language_server_id, |e| e.0)
4569 {
4570 self.diagnostics[ix]
4571 .1
4572 .groups(language_server_id, &mut groups, self);
4573 }
4574 } else {
4575 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4576 diagnostics.groups(*language_server_id, &mut groups, self);
4577 }
4578 }
4579
4580 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4581 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4582 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4583 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4584 });
4585
4586 groups
4587 }
4588
4589 /// Returns an iterator over the diagnostics for the given group.
4590 pub fn diagnostic_group<O>(
4591 &self,
4592 group_id: usize,
4593 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4594 where
4595 O: FromAnchor + 'static,
4596 {
4597 self.diagnostics
4598 .iter()
4599 .flat_map(move |(_, set)| set.group(group_id, self))
4600 }
4601
4602 /// An integer version number that accounts for all updates besides
4603 /// the buffer's text itself (which is versioned via a version vector).
4604 pub fn non_text_state_update_count(&self) -> usize {
4605 self.non_text_state_update_count
4606 }
4607
4608 /// An integer version that changes when the buffer's syntax changes.
4609 pub fn syntax_update_count(&self) -> usize {
4610 self.syntax.update_count()
4611 }
4612
    /// Returns a snapshot of the underlying file, if any.
4614 pub fn file(&self) -> Option<&Arc<dyn File>> {
4615 self.file.as_ref()
4616 }
4617
    /// Resolves the file path associated with the underlying file: relative to the worktree root
    /// by default, or prefixed with the worktree root's name when `include_root` is true.
4619 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4620 if let Some(file) = self.file() {
4621 if file.path().file_name().is_none() || include_root {
4622 Some(file.full_path(cx))
4623 } else {
4624 Some(file.path().to_path_buf())
4625 }
4626 } else {
4627 None
4628 }
4629 }
4630
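    /// Returns the distinct words in the given range, keyed by their text and mapped to an
    /// anchor range of one of their occurrences. When `fuzzy_contents` is set, only words
    /// matching that fuzzy query are returned.
    ///
    /// A minimal sketch (not a runnable doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Collect words that fuzzily match "cfg" anywhere in the buffer, skipping
    /// // "words" that start with a digit.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, range) in &words {
    ///     println!("{word} at {:?}", range.to_offset(&snapshot));
    /// }
    /// ```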
4631 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4632 let query_str = query.fuzzy_contents;
4633 if query_str.is_some_and(|query| query.is_empty()) {
4634 return BTreeMap::default();
4635 }
4636
4637 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4638 language,
4639 override_id: None,
4640 }));
4641
4642 let mut query_ix = 0;
4643 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4644 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4645
4646 let mut words = BTreeMap::default();
4647 let mut current_word_start_ix = None;
4648 let mut chunk_ix = query.range.start;
4649 for chunk in self.chunks(query.range, false) {
4650 for (i, c) in chunk.text.char_indices() {
4651 let ix = chunk_ix + i;
4652 if classifier.is_word(c) {
4653 if current_word_start_ix.is_none() {
4654 current_word_start_ix = Some(ix);
4655 }
4656
4657 if let Some(query_chars) = &query_chars
4658 && query_ix < query_len
4659 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4660 {
4661 query_ix += 1;
4662 }
4663 continue;
4664 } else if let Some(word_start) = current_word_start_ix.take()
4665 && query_ix == query_len
4666 {
4667 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4668 let mut word_text = self.text_for_range(word_start..ix).peekable();
4669 let first_char = word_text
4670 .peek()
4671 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start
                    // with a digit.
4673 if !query.skip_digits
4674 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4675 {
4676 words.insert(word_text.collect(), word_range);
4677 }
4678 }
4679 query_ix = 0;
4680 }
4681 chunk_ix += chunk.text.len();
4682 }
4683
4684 words
4685 }
4686}
4687
4688pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this string, in order
    /// (a case-insensitive subsequence match).
    pub fuzzy_contents: Option<&'a str>,
4691 /// Skips words that start with a digit.
4692 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
    pub range: Range<usize>,
4695}
4696
4697fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4698 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4699}
4700
4701fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4702 let mut result = IndentSize::spaces(0);
4703 for c in text {
4704 let kind = match c {
4705 ' ' => IndentKind::Space,
4706 '\t' => IndentKind::Tab,
4707 _ => break,
4708 };
4709 if result.len == 0 {
4710 result.kind = kind;
4711 }
4712 result.len += 1;
4713 }
4714 result
4715}
4716
4717impl Clone for BufferSnapshot {
4718 fn clone(&self) -> Self {
4719 Self {
4720 text: self.text.clone(),
4721 syntax: self.syntax.clone(),
4722 file: self.file.clone(),
4723 remote_selections: self.remote_selections.clone(),
4724 diagnostics: self.diagnostics.clone(),
4725 language: self.language.clone(),
4726 non_text_state_update_count: self.non_text_state_update_count,
4727 }
4728 }
4729}
4730
4731impl Deref for BufferSnapshot {
4732 type Target = text::BufferSnapshot;
4733
4734 fn deref(&self) -> &Self::Target {
4735 &self.text
4736 }
4737}
4738
4739unsafe impl Send for BufferChunks<'_> {}
4740
4741impl<'a> BufferChunks<'a> {
4742 pub(crate) fn new(
4743 text: &'a Rope,
4744 range: Range<usize>,
4745 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4746 diagnostics: bool,
4747 buffer_snapshot: Option<&'a BufferSnapshot>,
4748 ) -> Self {
4749 let mut highlights = None;
4750 if let Some((captures, highlight_maps)) = syntax {
4751 highlights = Some(BufferChunkHighlights {
4752 captures,
4753 next_capture: None,
4754 stack: Default::default(),
4755 highlight_maps,
4756 })
4757 }
4758
4759 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4760 let chunks = text.chunks_in_range(range.clone());
4761
4762 let mut this = BufferChunks {
4763 range,
4764 buffer_snapshot,
4765 chunks,
4766 diagnostic_endpoints,
4767 error_depth: 0,
4768 warning_depth: 0,
4769 information_depth: 0,
4770 hint_depth: 0,
4771 unnecessary_depth: 0,
4772 underline: true,
4773 highlights,
4774 };
4775 this.initialize_diagnostic_endpoints();
4776 this
4777 }
4778
    /// Seeks to the given byte range in the buffer.
4780 pub fn seek(&mut self, range: Range<usize>) {
4781 let old_range = std::mem::replace(&mut self.range, range.clone());
4782 self.chunks.set_range(self.range.clone());
4783 if let Some(highlights) = self.highlights.as_mut() {
4784 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4785 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4786 highlights
4787 .stack
4788 .retain(|(end_offset, _)| *end_offset > range.start);
4789 if let Some(capture) = &highlights.next_capture
4790 && range.start >= capture.node.start_byte()
4791 {
4792 let next_capture_end = capture.node.end_byte();
4793 if range.start < next_capture_end {
4794 highlights.stack.push((
4795 next_capture_end,
4796 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4797 ));
4798 }
4799 highlights.next_capture.take();
4800 }
4801 } else if let Some(snapshot) = self.buffer_snapshot {
4802 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4803 *highlights = BufferChunkHighlights {
4804 captures,
4805 next_capture: None,
4806 stack: Default::default(),
4807 highlight_maps,
4808 };
4809 } else {
4810 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4811 // Seeking such BufferChunks is not supported.
4812 debug_assert!(
4813 false,
4814 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4815 );
4816 }
4817
4818 highlights.captures.set_byte_range(self.range.clone());
4819 self.initialize_diagnostic_endpoints();
4820 }
4821 }
4822
4823 fn initialize_diagnostic_endpoints(&mut self) {
4824 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4825 && let Some(buffer) = self.buffer_snapshot
4826 {
4827 let mut diagnostic_endpoints = Vec::new();
4828 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4829 diagnostic_endpoints.push(DiagnosticEndpoint {
4830 offset: entry.range.start,
4831 is_start: true,
4832 severity: entry.diagnostic.severity,
4833 is_unnecessary: entry.diagnostic.is_unnecessary,
4834 underline: entry.diagnostic.underline,
4835 });
4836 diagnostic_endpoints.push(DiagnosticEndpoint {
4837 offset: entry.range.end,
4838 is_start: false,
4839 severity: entry.diagnostic.severity,
4840 is_unnecessary: entry.diagnostic.is_unnecessary,
4841 underline: entry.diagnostic.underline,
4842 });
4843 }
4844 diagnostic_endpoints
4845 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4846 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4847 self.hint_depth = 0;
4848 self.error_depth = 0;
4849 self.warning_depth = 0;
4850 self.information_depth = 0;
4851 }
4852 }
4853
4854 /// The current byte offset in the buffer.
4855 pub fn offset(&self) -> usize {
4856 self.range.start
4857 }
4858
4859 pub fn range(&self) -> Range<usize> {
4860 self.range.clone()
4861 }
4862
4863 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4864 let depth = match endpoint.severity {
4865 DiagnosticSeverity::ERROR => &mut self.error_depth,
4866 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4867 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4868 DiagnosticSeverity::HINT => &mut self.hint_depth,
4869 _ => return,
4870 };
4871 if endpoint.is_start {
4872 *depth += 1;
4873 } else {
4874 *depth -= 1;
4875 }
4876
4877 if endpoint.is_unnecessary {
4878 if endpoint.is_start {
4879 self.unnecessary_depth += 1;
4880 } else {
4881 self.unnecessary_depth -= 1;
4882 }
4883 }
4884 }
4885
4886 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4887 if self.error_depth > 0 {
4888 Some(DiagnosticSeverity::ERROR)
4889 } else if self.warning_depth > 0 {
4890 Some(DiagnosticSeverity::WARNING)
4891 } else if self.information_depth > 0 {
4892 Some(DiagnosticSeverity::INFORMATION)
4893 } else if self.hint_depth > 0 {
4894 Some(DiagnosticSeverity::HINT)
4895 } else {
4896 None
4897 }
4898 }
4899
4900 fn current_code_is_unnecessary(&self) -> bool {
4901 self.unnecessary_depth > 0
4902 }
4903}
4904
4905impl<'a> Iterator for BufferChunks<'a> {
4906 type Item = Chunk<'a>;
4907
4908 fn next(&mut self) -> Option<Self::Item> {
4909 let mut next_capture_start = usize::MAX;
4910 let mut next_diagnostic_endpoint = usize::MAX;
4911
4912 if let Some(highlights) = self.highlights.as_mut() {
4913 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4914 if *parent_capture_end <= self.range.start {
4915 highlights.stack.pop();
4916 } else {
4917 break;
4918 }
4919 }
4920
4921 if highlights.next_capture.is_none() {
4922 highlights.next_capture = highlights.captures.next();
4923 }
4924
4925 while let Some(capture) = highlights.next_capture.as_ref() {
4926 if self.range.start < capture.node.start_byte() {
4927 next_capture_start = capture.node.start_byte();
4928 break;
4929 } else {
4930 let highlight_id =
4931 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4932 highlights
4933 .stack
4934 .push((capture.node.end_byte(), highlight_id));
4935 highlights.next_capture = highlights.captures.next();
4936 }
4937 }
4938 }
4939
4940 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4941 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4942 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4943 if endpoint.offset <= self.range.start {
4944 self.update_diagnostic_depths(endpoint);
4945 diagnostic_endpoints.next();
4946 self.underline = endpoint.underline;
4947 } else {
4948 next_diagnostic_endpoint = endpoint.offset;
4949 break;
4950 }
4951 }
4952 }
4953 self.diagnostic_endpoints = diagnostic_endpoints;
4954
4955 if let Some(ChunkBitmaps {
4956 text: chunk,
4957 chars: chars_map,
4958 tabs,
4959 }) = self.chunks.peek_tabs()
4960 {
4961 let chunk_start = self.range.start;
4962 let mut chunk_end = (self.chunks.offset() + chunk.len())
4963 .min(next_capture_start)
4964 .min(next_diagnostic_endpoint);
4965 let mut highlight_id = None;
4966 if let Some(highlights) = self.highlights.as_ref()
4967 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4968 {
4969 chunk_end = chunk_end.min(*parent_capture_end);
4970 highlight_id = Some(*parent_highlight_id);
4971 }
4972
4973 let slice =
4974 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4975 let bit_end = chunk_end - self.chunks.offset();
4976
4977 let mask = if bit_end >= 128 {
4978 u128::MAX
4979 } else {
4980 (1u128 << bit_end) - 1
4981 };
4982 let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
4983 let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
4984
4985 self.range.start = chunk_end;
4986 if self.range.start == self.chunks.offset() + chunk.len() {
4987 self.chunks.next().unwrap();
4988 }
4989
4990 Some(Chunk {
4991 text: slice,
4992 syntax_highlight_id: highlight_id,
4993 underline: self.underline,
4994 diagnostic_severity: self.current_diagnostic_severity(),
4995 is_unnecessary: self.current_code_is_unnecessary(),
4996 tabs,
4997 chars: chars_map,
4998 ..Chunk::default()
4999 })
5000 } else {
5001 None
5002 }
5003 }
5004}
5005
5006impl operation_queue::Operation for Operation {
5007 fn lamport_timestamp(&self) -> clock::Lamport {
5008 match self {
5009 Operation::Buffer(_) => {
5010 unreachable!("buffer operations should never be deferred at this layer")
5011 }
5012 Operation::UpdateDiagnostics {
5013 lamport_timestamp, ..
5014 }
5015 | Operation::UpdateSelections {
5016 lamport_timestamp, ..
5017 }
5018 | Operation::UpdateCompletionTriggers {
5019 lamport_timestamp, ..
5020 }
5021 | Operation::UpdateLineEnding {
5022 lamport_timestamp, ..
5023 } => *lamport_timestamp,
5024 }
5025 }
5026}
5027
5028impl Default for Diagnostic {
5029 fn default() -> Self {
5030 Self {
5031 source: Default::default(),
5032 source_kind: DiagnosticSourceKind::Other,
5033 code: None,
5034 code_description: None,
5035 severity: DiagnosticSeverity::ERROR,
5036 message: Default::default(),
5037 markdown: None,
5038 group_id: 0,
5039 is_primary: false,
5040 is_disk_based: false,
5041 is_unnecessary: false,
5042 underline: true,
5043 data: None,
5044 }
5045 }
5046}
5047
5048impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5050 pub fn spaces(len: u32) -> Self {
5051 Self {
5052 len,
5053 kind: IndentKind::Space,
5054 }
5055 }
5056
5057 /// Returns an [`IndentSize`] representing a tab.
5058 pub fn tab() -> Self {
5059 Self {
5060 len: 1,
5061 kind: IndentKind::Tab,
5062 }
5063 }
5064
5065 /// An iterator over the characters represented by this [`IndentSize`].
5066 pub fn chars(&self) -> impl Iterator<Item = char> {
5067 iter::repeat(self.char()).take(self.len as usize)
5068 }
5069
5070 /// The character representation of this [`IndentSize`].
5071 pub fn char(&self) -> char {
5072 match self.kind {
5073 IndentKind::Space => ' ',
5074 IndentKind::Tab => '\t',
5075 }
5076 }
5077
5078 /// Consumes the current [`IndentSize`] and returns a new one that has
5079 /// been shrunk or enlarged by the given size along the given direction.
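    ///
    /// A minimal sketch (not a runnable doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing by a matching kind adds the lengths; shrinking subtracts them.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// assert_eq!(IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4)).len, 4);
    /// // A mismatched kind leaves a non-empty indent unchanged.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::tab()).len, 4);
    /// ```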
5080 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5081 match direction {
5082 Ordering::Less => {
5083 if self.kind == size.kind && self.len >= size.len {
5084 self.len -= size.len;
5085 }
5086 }
5087 Ordering::Equal => {}
5088 Ordering::Greater => {
5089 if self.len == 0 {
5090 self = size;
5091 } else if self.kind == size.kind {
5092 self.len += size.len;
5093 }
5094 }
5095 }
5096 self
5097 }
5098
5099 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5100 match self.kind {
5101 IndentKind::Space => self.len as usize,
5102 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5103 }
5104 }
5105}
5106
5107#[cfg(any(test, feature = "test-support"))]
5108pub struct TestFile {
5109 pub path: Arc<Path>,
5110 pub root_name: String,
5111 pub local_root: Option<PathBuf>,
5112}
5113
5114#[cfg(any(test, feature = "test-support"))]
5115impl File for TestFile {
5116 fn path(&self) -> &Arc<Path> {
5117 &self.path
5118 }
5119
5120 fn full_path(&self, _: &gpui::App) -> PathBuf {
5121 PathBuf::from(&self.root_name).join(self.path.as_ref())
5122 }
5123
5124 fn as_local(&self) -> Option<&dyn LocalFile> {
5125 if self.local_root.is_some() {
5126 Some(self)
5127 } else {
5128 None
5129 }
5130 }
5131
5132 fn disk_state(&self) -> DiskState {
5133 unimplemented!()
5134 }
5135
5136 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
5137 self.path().file_name().unwrap_or(self.root_name.as_ref())
5138 }
5139
5140 fn worktree_id(&self, _: &App) -> WorktreeId {
5141 WorktreeId::from_usize(0)
5142 }
5143
5144 fn to_proto(&self, _: &App) -> rpc::proto::File {
5145 unimplemented!()
5146 }
5147
5148 fn is_private(&self) -> bool {
5149 false
5150 }
5151}
5152
5153#[cfg(any(test, feature = "test-support"))]
5154impl LocalFile for TestFile {
5155 fn abs_path(&self, _cx: &App) -> PathBuf {
5156 PathBuf::from(self.local_root.as_ref().unwrap())
5157 .join(&self.root_name)
5158 .join(self.path.as_ref())
5159 }
5160
5161 fn load(&self, _cx: &App) -> Task<Result<String>> {
5162 unimplemented!()
5163 }
5164
5165 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5166 unimplemented!()
5167 }
5168}
5169
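/// Groups an ascending sequence of row numbers into contiguous ranges, starting a new range
/// whenever the sequence skips a value or a range would exceed `max_len` values.
///
/// A minimal sketch (not a runnable doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```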
5170pub(crate) fn contiguous_ranges(
5171 values: impl Iterator<Item = u32>,
5172 max_len: usize,
5173) -> impl Iterator<Item = Range<u32>> {
5174 let mut values = values;
5175 let mut current_range: Option<Range<u32>> = None;
5176 std::iter::from_fn(move || {
5177 loop {
5178 if let Some(value) = values.next() {
5179 if let Some(range) = &mut current_range
5180 && value == range.end
5181 && range.len() < max_len
5182 {
5183 range.end += 1;
5184 continue;
5185 }
5186
5187 let prev_range = current_range.clone();
5188 current_range = Some(value..(value + 1));
5189 if prev_range.is_some() {
5190 return prev_range;
5191 }
5192 } else {
5193 return current_range.take();
5194 }
5195 }
5196 })
5197}
5198
5199#[derive(Default, Debug)]
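/// Classifies characters as word, whitespace, or punctuation characters, taking into account
/// the word characters configured for an optional [`LanguageScope`].
///
/// A minimal sketch (not a runnable doctest):
///
/// ```ignore
/// // Without a language scope, only alphanumerics and `_` count as word characters.
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```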
5200pub struct CharClassifier {
5201 scope: Option<LanguageScope>,
5202 for_completion: bool,
5203 ignore_punctuation: bool,
5204}
5205
5206impl CharClassifier {
5207 pub fn new(scope: Option<LanguageScope>) -> Self {
5208 Self {
5209 scope,
5210 for_completion: false,
5211 ignore_punctuation: false,
5212 }
5213 }
5214
5215 pub fn for_completion(self, for_completion: bool) -> Self {
5216 Self {
5217 for_completion,
5218 ..self
5219 }
5220 }
5221
5222 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5223 Self {
5224 ignore_punctuation,
5225 ..self
5226 }
5227 }
5228
5229 pub fn is_whitespace(&self, c: char) -> bool {
5230 self.kind(c) == CharKind::Whitespace
5231 }
5232
5233 pub fn is_word(&self, c: char) -> bool {
5234 self.kind(c) == CharKind::Word
5235 }
5236
5237 pub fn is_punctuation(&self, c: char) -> bool {
5238 self.kind(c) == CharKind::Punctuation
5239 }
5240
5241 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5242 if c.is_alphanumeric() || c == '_' {
5243 return CharKind::Word;
5244 }
5245
5246 if let Some(scope) = &self.scope {
5247 let characters = if self.for_completion {
5248 scope.completion_query_characters()
5249 } else {
5250 scope.word_characters()
5251 };
5252 if let Some(characters) = characters
5253 && characters.contains(&c)
5254 {
5255 return CharKind::Word;
5256 }
5257 }
5258
5259 if c.is_whitespace() {
5260 return CharKind::Whitespace;
5261 }
5262
5263 if ignore_punctuation {
5264 CharKind::Word
5265 } else {
5266 CharKind::Punctuation
5267 }
5268 }
5269
5270 pub fn kind(&self, c: char) -> CharKind {
5271 self.kind_with(c, self.ignore_punctuation)
5272 }
5273}
5274
5275/// Find all of the ranges of whitespace that occur at the ends of lines
5276/// in the given rope.
5277///
5278/// This could also be done with a regex search, but this implementation
5279/// avoids copying text.
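///
/// A minimal sketch (not a runnable doctest), assuming a `Rope` built from a string:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// // Two trailing spaces after the opening brace, one trailing tab after the closing brace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```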
5280pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5281 let mut ranges = Vec::new();
5282
5283 let mut offset = 0;
5284 let mut prev_chunk_trailing_whitespace_range = 0..0;
5285 for chunk in rope.chunks() {
5286 let mut prev_line_trailing_whitespace_range = 0..0;
5287 for (i, line) in chunk.split('\n').enumerate() {
5288 let line_end_offset = offset + line.len();
5289 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5290 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5291
5292 if i == 0 && trimmed_line_len == 0 {
5293 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5294 }
5295 if !prev_line_trailing_whitespace_range.is_empty() {
5296 ranges.push(prev_line_trailing_whitespace_range);
5297 }
5298
5299 offset = line_end_offset + 1;
5300 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5301 }
5302
5303 offset -= 1;
5304 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5305 }
5306
5307 if !prev_chunk_trailing_whitespace_range.is_empty() {
5308 ranges.push(prev_chunk_trailing_whitespace_range);
5309 }
5310
5311 ranges
5312}