1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::{SettingsUi, WorktreeId};
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
80/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
111 pending_autoindent: Option<Task<()>>,
112 sync_parse_timeout: Duration,
113 syntax_map: Mutex<SyntaxMap>,
114 reparse: Option<Task<()>>,
115 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
116 non_text_state_update_count: usize,
117 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
118 remote_selections: TreeMap<ReplicaId, SelectionSet>,
119 diagnostics_timestamp: clock::Lamport,
120 completion_triggers: BTreeSet<String>,
121 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
122 completion_triggers_timestamp: clock::Lamport,
123 deferred_ops: OperationQueue<Operation>,
124 capability: Capability,
125 has_conflict: bool,
126 /// Memoizes calls to has_changes_since(saved_version).
127 /// The cell contains (self.version, has_changes) as of the last call.
128 has_unsaved_edits: Cell<(clock::Global, bool)>,
129 change_bits: Vec<rc::Weak<Cell<bool>>>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(
178 Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
179)]
180#[serde(rename_all = "snake_case")]
181pub enum CursorShape {
182 /// A vertical bar
183 #[default]
184 Bar,
185 /// A block that surrounds the following character
186 Block,
187 /// An underline that runs along the following character
188 Underline,
189 /// A box drawn around the following character
190 Hollow,
191}
192
193#[derive(Clone, Debug)]
194struct SelectionSet {
195 line_mode: bool,
196 cursor_shape: CursorShape,
197 selections: Arc<[Selection<Anchor>]>,
198 lamport_timestamp: clock::Lamport,
199}
200
201/// A diagnostic associated with a certain range of a buffer.
202#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
203pub struct Diagnostic {
204 /// The name of the service that produced this diagnostic.
205 pub source: Option<String>,
206 /// A machine-readable code that identifies this diagnostic.
207 pub code: Option<NumberOrString>,
208 pub code_description: Option<lsp::Uri>,
209 /// Whether this diagnostic is a hint, warning, or error.
210 pub severity: DiagnosticSeverity,
211 /// The human-readable message associated with this diagnostic.
212 pub message: String,
213 /// The human-readable message in Markdown format, if available.
214 pub markdown: Option<String>,
215 /// An id that identifies the group to which this diagnostic belongs.
216 ///
217 /// When a language server produces a diagnostic with
218 /// one or more associated diagnostics, those diagnostics are all
219 /// assigned a single group ID.
220 pub group_id: usize,
221 /// Whether this diagnostic is the primary diagnostic for its group.
222 ///
223 /// In a given group, the primary diagnostic is the top-level diagnostic
224 /// returned by the language server. The non-primary diagnostics are the
225 /// associated diagnostics.
226 pub is_primary: bool,
227 /// Whether this diagnostic is considered to originate from an analysis of
228 /// files on disk, as opposed to any unsaved buffer contents. This is a
229 /// property of a given diagnostic source, and is configured for a given
230 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
231 /// for the language server.
232 pub is_disk_based: bool,
233 /// Whether this diagnostic marks unnecessary code.
234 pub is_unnecessary: bool,
235 /// A quick way to separate diagnostic groups by their source.
236 pub source_kind: DiagnosticSourceKind,
237 /// Data from the language server that produced this diagnostic. Passed back to the language server when we request code actions for this diagnostic.
238 pub data: Option<Value>,
239 /// Whether to underline the corresponding text range in the editor.
240 pub underline: bool,
241}
242
243#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
244pub enum DiagnosticSourceKind {
245 Pulled,
246 Pushed,
247 Other,
248}
249
250/// An operation used to synchronize this buffer with its other replicas.
251#[derive(Clone, Debug, PartialEq)]
252pub enum Operation {
253 /// A text operation.
254 Buffer(text::Operation),
255
256 /// An update to the buffer's diagnostics.
257 UpdateDiagnostics {
258 /// The id of the language server that produced the new diagnostics.
259 server_id: LanguageServerId,
260 /// The diagnostics.
261 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
262 /// The buffer's lamport timestamp.
263 lamport_timestamp: clock::Lamport,
264 },
265
266 /// An update to the most recent selections in this buffer.
267 UpdateSelections {
268 /// The selections.
269 selections: Arc<[Selection<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 /// Whether the selections are in 'line mode'.
273 line_mode: bool,
274 /// The [`CursorShape`] associated with these selections.
275 cursor_shape: CursorShape,
276 },
277
278 /// An update to the characters that should trigger autocompletion
279 /// for this buffer.
280 UpdateCompletionTriggers {
281 /// The characters that trigger autocompletion.
282 triggers: Vec<String>,
283 /// The buffer's lamport timestamp.
284 lamport_timestamp: clock::Lamport,
285 /// The language server ID.
286 server_id: LanguageServerId,
287 },
288
289 /// An update to the line ending type of this buffer.
290 UpdateLineEnding {
291 /// The line ending type.
292 line_ending: LineEnding,
293 /// The buffer's lamport timestamp.
294 lamport_timestamp: clock::Lamport,
295 },
296}
297
298/// An event that occurs in a buffer.
299#[derive(Clone, Debug, PartialEq)]
300pub enum BufferEvent {
301 /// The buffer was changed in a way that must be
302 /// propagated to its other replicas.
303 Operation {
304 operation: Operation,
305 is_local: bool,
306 },
307 /// The buffer was edited.
308 Edited,
309 /// The buffer's `dirty` bit changed.
310 DirtyChanged,
311 /// The buffer was saved.
312 Saved,
313 /// The buffer's file was changed on disk.
314 FileHandleChanged,
315 /// The buffer was reloaded.
316 Reloaded,
317 /// The buffer is in need of a reload.
318 ReloadNeeded,
319 /// The buffer's language was changed.
320 LanguageChanged,
321 /// The buffer's syntax trees were updated.
322 Reparsed,
323 /// The buffer's diagnostics were updated.
324 DiagnosticsUpdated,
325 /// The buffer gained or lost editing capabilities.
326 CapabilityChanged,
327}
328
329/// The file associated with a buffer.
330pub trait File: Send + Sync + Any {
331 /// Returns the [`LocalFile`] associated with this file, if the
332 /// file is local.
333 fn as_local(&self) -> Option<&dyn LocalFile>;
334
335 /// Returns whether this file is local.
336 fn is_local(&self) -> bool {
337 self.as_local().is_some()
338 }
339
340 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
341 /// only available in some states, such as modification time.
342 fn disk_state(&self) -> DiskState;
343
344 /// Returns the path of this file relative to the worktree's root directory.
345 fn path(&self) -> &Arc<Path>;
346
347 /// Returns the path of this file relative to the worktree's parent directory (this means it
348 /// includes the name of the worktree's root folder).
349 fn full_path(&self, cx: &App) -> PathBuf;
350
351 /// Returns the last component of this handle's absolute path. If this handle refers to the root
352 /// of its worktree, then this method will return the name of the worktree itself.
353 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
354
355 /// Returns the id of the worktree to which this file belongs.
356 ///
357 /// This is needed for looking up project-specific settings.
358 fn worktree_id(&self, cx: &App) -> WorktreeId;
359
360 /// Converts this file into a protobuf message.
361 fn to_proto(&self, cx: &App) -> rpc::proto::File;
362
363 /// Returns whether Zed considers this to be a private file.
364 fn is_private(&self) -> bool;
365}
366
367/// The file's storage status - whether it's stored (`Present`), and if so when it was last
368/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
369/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
370/// indicator for new files.
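///
/// A minimal sketch of how these states map onto the helper methods below
/// (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// // Only files that are present on disk report a modification time.
/// assert!(!DiskState::New.exists());
/// assert!(!DiskState::Deleted.exists());
/// assert!(DiskState::New.mtime().is_none());
/// ```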
371#[derive(Copy, Clone, Debug, PartialEq)]
372pub enum DiskState {
373 /// File created in Zed that has not been saved.
374 New,
375 /// File present on the filesystem.
376 Present { mtime: MTime },
377 /// Deleted file that was previously present.
378 Deleted,
379}
380
381impl DiskState {
382 /// Returns the file's last known modification time on disk.
383 pub fn mtime(self) -> Option<MTime> {
384 match self {
385 DiskState::New => None,
386 DiskState::Present { mtime } => Some(mtime),
387 DiskState::Deleted => None,
388 }
389 }
390
391 pub fn exists(&self) -> bool {
392 match self {
393 DiskState::New => false,
394 DiskState::Present { .. } => true,
395 DiskState::Deleted => false,
396 }
397 }
398}
399
400/// The file associated with a buffer, in the case where the file is on the local disk.
401pub trait LocalFile: File {
402 /// Returns the absolute path of this file.
403 fn abs_path(&self, cx: &App) -> PathBuf;
404
405 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
406 fn load(&self, cx: &App) -> Task<Result<String>>;
407
408 /// Loads the file's contents from disk.
409 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
410}
411
412/// The auto-indent behavior associated with an editing operation.
413/// For some editing operations, each affected line of text has its
414/// indentation recomputed. For other operations, the entire block
415/// of edited text is adjusted uniformly.
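///
/// A minimal sketch of requesting block-mode auto-indent when pasting copied
/// text (illustrative only; `paste_offset` and `copied_text` are placeholders,
/// and it assumes `Buffer::edit(edits, autoindent_mode, cx)` as it is used
/// elsewhere in this file):
///
/// ```ignore
/// buffer.edit(
///     [(paste_offset..paste_offset, copied_text)],
///     Some(AutoindentMode::Block {
///         // One entry per insertion; `Some(4)` records that the copied text's
///         // first line originally started at column 4.
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```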
416#[derive(Clone, Debug)]
417pub enum AutoindentMode {
418 /// Indent each line of inserted text.
419 EachLine,
420 /// Apply the same indentation adjustment to all of the lines
421 /// in a given insertion.
422 Block {
423 /// The original indentation column of the first line of each
424 /// insertion, if it has been copied.
425 ///
426 /// Knowing this makes it possible to preserve the relative indentation
427 /// of every line in the insertion from when it was copied.
428 ///
429 /// If the original indent column is `a`, and the first line of the insertion
430 /// is then auto-indented to column `b`, then every other line of
431 /// the insertion will have its indentation adjusted by `b - a` columns.
432 original_indent_columns: Vec<Option<u32>>,
433 },
434}
435
436#[derive(Clone)]
437struct AutoindentRequest {
438 before_edit: BufferSnapshot,
439 entries: Vec<AutoindentRequestEntry>,
440 is_block_mode: bool,
441 ignore_empty_lines: bool,
442}
443
444#[derive(Debug, Clone)]
445struct AutoindentRequestEntry {
446 /// A range of the buffer whose indentation should be adjusted.
447 range: Range<Anchor>,
448 /// Whether or not these lines should be considered brand new, for the
449 /// purpose of auto-indent. When text is not new, its indentation will
450 /// only be adjusted if the suggested indentation level has *changed*
451 /// since the edit was made.
452 first_line_is_new: bool,
453 indent_size: IndentSize,
454 original_indent_column: Option<u32>,
455}
456
457#[derive(Debug)]
458struct IndentSuggestion {
459 basis_row: u32,
460 delta: Ordering,
461 within_error: bool,
462}
463
464struct BufferChunkHighlights<'a> {
465 captures: SyntaxMapCaptures<'a>,
466 next_capture: Option<SyntaxMapCapture<'a>>,
467 stack: Vec<(usize, HighlightId)>,
468 highlight_maps: Vec<HighlightMap>,
469}
470
471/// An iterator that yields chunks of a buffer's text, along with their
472/// syntax highlights and diagnostic status.
473pub struct BufferChunks<'a> {
474 buffer_snapshot: Option<&'a BufferSnapshot>,
475 range: Range<usize>,
476 chunks: text::Chunks<'a>,
477 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
478 error_depth: usize,
479 warning_depth: usize,
480 information_depth: usize,
481 hint_depth: usize,
482 unnecessary_depth: usize,
483 underline: bool,
484 highlights: Option<BufferChunkHighlights<'a>>,
485}
486
487/// A chunk of a buffer's text, along with its syntax highlight and
488/// diagnostic status.
489#[derive(Clone, Debug, Default)]
490pub struct Chunk<'a> {
491 /// The text of the chunk.
492 pub text: &'a str,
493 /// The syntax highlighting style of the chunk.
494 pub syntax_highlight_id: Option<HighlightId>,
495 /// The highlight style that has been applied to this chunk in
496 /// the editor.
497 pub highlight_style: Option<HighlightStyle>,
498 /// The severity of diagnostic associated with this chunk, if any.
499 pub diagnostic_severity: Option<DiagnosticSeverity>,
500 /// Whether this chunk of text is marked as unnecessary.
501 pub is_unnecessary: bool,
502 /// Whether this chunk of text was originally a tab character.
503 pub is_tab: bool,
504 /// A bitset of which characters in this chunk are tabs.
505 pub tabs: u128,
506 /// A bitmap of the character indices in this chunk.
507 pub chars: u128,
508 /// Whether this chunk of text is part of an inlay.
509 pub is_inlay: bool,
510 /// Whether to underline the corresponding text range in the editor.
511 pub underline: bool,
512}
513
514/// A set of edits to a given version of a buffer, computed asynchronously.
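///
/// A minimal sketch of producing and applying a [`Diff`], based on the calls
/// made in [`Buffer::reload`] below (illustrative only):
///
/// ```ignore
/// // Compute the edits needed to transform the buffer into `new_text`...
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// // ...and apply them if the buffer hasn't changed in the meantime.
/// buffer.update(cx, |buffer, cx| {
///     if buffer.version() == diff.base_version {
///         buffer.apply_diff(diff, cx);
///     }
/// });
/// ```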
515#[derive(Debug)]
516pub struct Diff {
517 pub base_version: clock::Global,
518 pub line_ending: LineEnding,
519 pub edits: Vec<(Range<usize>, Arc<str>)>,
520}
521
522#[derive(Debug, Clone, Copy)]
523pub(crate) struct DiagnosticEndpoint {
524 offset: usize,
525 is_start: bool,
526 underline: bool,
527 severity: DiagnosticSeverity,
528 is_unnecessary: bool,
529}
530
531/// A class of characters, used for characterizing a run of text.
532#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
533pub enum CharKind {
534 /// Whitespace.
535 Whitespace,
536 /// Punctuation.
537 Punctuation,
538 /// Word.
539 Word,
540}
541
542/// A runnable is a set of data about a region that could be resolved into a task.
543pub struct Runnable {
544 pub tags: SmallVec<[RunnableTag; 1]>,
545 pub language: Arc<Language>,
546 pub buffer: BufferId,
547}
548
549#[derive(Default, Clone, Debug)]
550pub struct HighlightedText {
551 pub text: SharedString,
552 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
553}
554
555#[derive(Default, Debug)]
556struct HighlightedTextBuilder {
557 pub text: String,
558 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
559}
560
561impl HighlightedText {
562 pub fn from_buffer_range<T: ToOffset>(
563 range: Range<T>,
564 snapshot: &text::BufferSnapshot,
565 syntax_snapshot: &SyntaxSnapshot,
566 override_style: Option<HighlightStyle>,
567 syntax_theme: &SyntaxTheme,
568 ) -> Self {
569 let mut highlighted_text = HighlightedTextBuilder::default();
570 highlighted_text.add_text_from_buffer_range(
571 range,
572 snapshot,
573 syntax_snapshot,
574 override_style,
575 syntax_theme,
576 );
577 highlighted_text.build()
578 }
579
580 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
581 gpui::StyledText::new(self.text.clone())
582 .with_default_highlights(default_style, self.highlights.iter().cloned())
583 }
584
585 /// Returns the first line, with leading whitespace trimmed unless a highlight
586 /// starts within it, along with a boolean indicating whether more lines follow.
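    ///
    /// A minimal sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// // `highlighted_text` is an existing `HighlightedText`.
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```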
587 pub fn first_line_preview(self) -> (Self, bool) {
588 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
589 let first_line = &self.text[..newline_ix];
590
591 // Trim leading whitespace, unless an edit starts prior to it.
592 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
593 if let Some((first_highlight_range, _)) = self.highlights.first() {
594 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
595 }
596
597 let preview_text = &first_line[preview_start_ix..];
598 let preview_highlights = self
599 .highlights
600 .into_iter()
601 .take_while(|(range, _)| range.start < newline_ix)
602 .filter_map(|(mut range, highlight)| {
603 range.start = range.start.saturating_sub(preview_start_ix);
604 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
605 if range.is_empty() {
606 None
607 } else {
608 Some((range, highlight))
609 }
610 });
611
612 let preview = Self {
613 text: SharedString::new(preview_text),
614 highlights: preview_highlights.collect(),
615 };
616
617 (preview, self.text.len() > newline_ix)
618 }
619}
620
621impl HighlightedTextBuilder {
622 pub fn build(self) -> HighlightedText {
623 HighlightedText {
624 text: self.text.into(),
625 highlights: self.highlights,
626 }
627 }
628
629 pub fn add_text_from_buffer_range<T: ToOffset>(
630 &mut self,
631 range: Range<T>,
632 snapshot: &text::BufferSnapshot,
633 syntax_snapshot: &SyntaxSnapshot,
634 override_style: Option<HighlightStyle>,
635 syntax_theme: &SyntaxTheme,
636 ) {
637 let range = range.to_offset(snapshot);
638 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
639 let start = self.text.len();
640 self.text.push_str(chunk.text);
641 let end = self.text.len();
642
643 if let Some(highlight_style) = chunk
644 .syntax_highlight_id
645 .and_then(|id| id.style(syntax_theme))
646 {
647 let highlight_style = override_style.map_or(highlight_style, |override_style| {
648 highlight_style.highlight(override_style)
649 });
650 self.highlights.push((start..end, highlight_style));
651 } else if let Some(override_style) = override_style {
652 self.highlights.push((start..end, override_style));
653 }
654 }
655 }
656
657 fn highlighted_chunks<'a>(
658 range: Range<usize>,
659 snapshot: &'a text::BufferSnapshot,
660 syntax_snapshot: &'a SyntaxSnapshot,
661 ) -> BufferChunks<'a> {
662 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
663 grammar
664 .highlights_config
665 .as_ref()
666 .map(|config| &config.query)
667 });
668
669 let highlight_maps = captures
670 .grammars()
671 .iter()
672 .map(|grammar| grammar.highlight_map())
673 .collect();
674
675 BufferChunks::new(
676 snapshot.as_rope(),
677 range,
678 Some((captures, highlight_maps)),
679 false,
680 None,
681 )
682 }
683}
684
685#[derive(Clone)]
686pub struct EditPreview {
687 old_snapshot: text::BufferSnapshot,
688 applied_edits_snapshot: text::BufferSnapshot,
689 syntax_snapshot: SyntaxSnapshot,
690}
691
692impl EditPreview {
693 pub fn highlight_edits(
694 &self,
695 current_snapshot: &BufferSnapshot,
696 edits: &[(Range<Anchor>, String)],
697 include_deletions: bool,
698 cx: &App,
699 ) -> HighlightedText {
700 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
701 return HighlightedText::default();
702 };
703
704 let mut highlighted_text = HighlightedTextBuilder::default();
705
706 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
707
708 let insertion_highlight_style = HighlightStyle {
709 background_color: Some(cx.theme().status().created_background),
710 ..Default::default()
711 };
712 let deletion_highlight_style = HighlightStyle {
713 background_color: Some(cx.theme().status().deleted_background),
714 ..Default::default()
715 };
716 let syntax_theme = cx.theme().syntax();
717
718 for (range, edit_text) in edits {
719 let edit_new_end_in_preview_snapshot = range
720 .end
721 .bias_right(&self.old_snapshot)
722 .to_offset(&self.applied_edits_snapshot);
723 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
724
725 let unchanged_range_in_preview_snapshot =
726 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
727 if !unchanged_range_in_preview_snapshot.is_empty() {
728 highlighted_text.add_text_from_buffer_range(
729 unchanged_range_in_preview_snapshot,
730 &self.applied_edits_snapshot,
731 &self.syntax_snapshot,
732 None,
733 syntax_theme,
734 );
735 }
736
737 let range_in_current_snapshot = range.to_offset(current_snapshot);
738 if include_deletions && !range_in_current_snapshot.is_empty() {
739 highlighted_text.add_text_from_buffer_range(
740 range_in_current_snapshot,
741 &current_snapshot.text,
742 &current_snapshot.syntax,
743 Some(deletion_highlight_style),
744 syntax_theme,
745 );
746 }
747
748 if !edit_text.is_empty() {
749 highlighted_text.add_text_from_buffer_range(
750 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
751 &self.applied_edits_snapshot,
752 &self.syntax_snapshot,
753 Some(insertion_highlight_style),
754 syntax_theme,
755 );
756 }
757
758 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
759 }
760
761 highlighted_text.add_text_from_buffer_range(
762 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
763 &self.applied_edits_snapshot,
764 &self.syntax_snapshot,
765 None,
766 syntax_theme,
767 );
768
769 highlighted_text.build()
770 }
771
772 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
773 let (first, _) = edits.first()?;
774 let (last, _) = edits.last()?;
775
776 let start = first
777 .start
778 .bias_left(&self.old_snapshot)
779 .to_point(&self.applied_edits_snapshot);
780 let end = last
781 .end
782 .bias_right(&self.old_snapshot)
783 .to_point(&self.applied_edits_snapshot);
784
785 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
786 let range = Point::new(start.row, 0)
787 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
788
789 Some(range.to_offset(&self.applied_edits_snapshot))
790 }
791}
792
793#[derive(Clone, Debug, PartialEq, Eq)]
794pub struct BracketMatch {
795 pub open_range: Range<usize>,
796 pub close_range: Range<usize>,
797 pub newline_only: bool,
798}
799
800impl Buffer {
801 /// Create a new buffer with the given base text.
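    ///
    /// A minimal sketch of constructing a local buffer (illustrative only;
    /// buffers are gpui entities, so construction happens inside `cx.new`):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```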
802 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
803 Self::build(
804 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
805 None,
806 Capability::ReadWrite,
807 )
808 }
809
810 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
811 pub fn local_normalized(
812 base_text_normalized: Rope,
813 line_ending: LineEnding,
814 cx: &Context<Self>,
815 ) -> Self {
816 Self::build(
817 TextBuffer::new_normalized(
818 0,
819 cx.entity_id().as_non_zero_u64().into(),
820 line_ending,
821 base_text_normalized,
822 ),
823 None,
824 Capability::ReadWrite,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer.
829 pub fn remote(
830 remote_id: BufferId,
831 replica_id: ReplicaId,
832 capability: Capability,
833 base_text: impl Into<String>,
834 ) -> Self {
835 Self::build(
836 TextBuffer::new(replica_id, remote_id, base_text.into()),
837 None,
838 capability,
839 )
840 }
841
842 /// Create a new buffer that is a replica of a remote buffer, populating its
843 /// state from the given protobuf message.
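    ///
    /// A minimal sketch of replicating a buffer from its serialized state
    /// (illustrative only; `replica_id` is assumed to come from the
    /// collaboration session):
    ///
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let replica = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```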
844 pub fn from_proto(
845 replica_id: ReplicaId,
846 capability: Capability,
847 message: proto::BufferState,
848 file: Option<Arc<dyn File>>,
849 ) -> Result<Self> {
850 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
851 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
852 let mut this = Self::build(buffer, file, capability);
853 this.text.set_line_ending(proto::deserialize_line_ending(
854 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
855 ));
856 this.saved_version = proto::deserialize_version(&message.saved_version);
857 this.saved_mtime = message.saved_mtime.map(|time| time.into());
858 Ok(this)
859 }
860
861 /// Serialize the buffer's state to a protobuf message.
862 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
863 proto::BufferState {
864 id: self.remote_id().into(),
865 file: self.file.as_ref().map(|f| f.to_proto(cx)),
866 base_text: self.base_text().to_string(),
867 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
868 saved_version: proto::serialize_version(&self.saved_version),
869 saved_mtime: self.saved_mtime.map(|time| time.into()),
870 }
871 }
872
873 /// Serialize as protobufs all of the changes to the buffer since the given version.
874 pub fn serialize_ops(
875 &self,
876 since: Option<clock::Global>,
877 cx: &App,
878 ) -> Task<Vec<proto::Operation>> {
879 let mut operations = Vec::new();
880 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
881
882 operations.extend(self.remote_selections.iter().map(|(_, set)| {
883 proto::serialize_operation(&Operation::UpdateSelections {
884 selections: set.selections.clone(),
885 lamport_timestamp: set.lamport_timestamp,
886 line_mode: set.line_mode,
887 cursor_shape: set.cursor_shape,
888 })
889 }));
890
891 for (server_id, diagnostics) in &self.diagnostics {
892 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
893 lamport_timestamp: self.diagnostics_timestamp,
894 server_id: *server_id,
895 diagnostics: diagnostics.iter().cloned().collect(),
896 }));
897 }
898
899 for (server_id, completions) in &self.completion_triggers_per_language_server {
900 operations.push(proto::serialize_operation(
901 &Operation::UpdateCompletionTriggers {
902 triggers: completions.iter().cloned().collect(),
903 lamport_timestamp: self.completion_triggers_timestamp,
904 server_id: *server_id,
905 },
906 ));
907 }
908
909 let text_operations = self.text.operations().clone();
910 cx.background_spawn(async move {
911 let since = since.unwrap_or_default();
912 operations.extend(
913 text_operations
914 .iter()
915 .filter(|(_, op)| !since.observed(op.timestamp()))
916 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
917 );
918 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
919 operations
920 })
921 }
922
923 /// Assign a language to the buffer, returning the buffer.
924 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
925 self.set_language(Some(language), cx);
926 self
927 }
928
929 /// Returns the [`Capability`] of this buffer.
930 pub fn capability(&self) -> Capability {
931 self.capability
932 }
933
934 /// Whether this buffer can only be read.
935 pub fn read_only(&self) -> bool {
936 self.capability == Capability::ReadOnly
937 }
938
939 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
940 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
941 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
942 let snapshot = buffer.snapshot();
943 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
944 Self {
945 saved_mtime,
946 saved_version: buffer.version(),
947 preview_version: buffer.version(),
948 reload_task: None,
949 transaction_depth: 0,
950 was_dirty_before_starting_transaction: None,
951 has_unsaved_edits: Cell::new((buffer.version(), false)),
952 text: buffer,
953 branch_state: None,
954 file,
955 capability,
956 syntax_map,
957 reparse: None,
958 non_text_state_update_count: 0,
959 sync_parse_timeout: Duration::from_millis(1),
960 parse_status: watch::channel(ParseStatus::Idle),
961 autoindent_requests: Default::default(),
962 wait_for_autoindent_txs: Default::default(),
963 pending_autoindent: Default::default(),
964 language: None,
965 remote_selections: Default::default(),
966 diagnostics: Default::default(),
967 diagnostics_timestamp: Default::default(),
968 completion_triggers: Default::default(),
969 completion_triggers_per_language_server: Default::default(),
970 completion_triggers_timestamp: Default::default(),
971 deferred_ops: OperationQueue::new(),
972 has_conflict: false,
973 change_bits: Default::default(),
974 _subscriptions: Vec::new(),
975 }
976 }
977
978 pub fn build_snapshot(
979 text: Rope,
980 language: Option<Arc<Language>>,
981 language_registry: Option<Arc<LanguageRegistry>>,
982 cx: &mut App,
983 ) -> impl Future<Output = BufferSnapshot> + use<> {
984 let entity_id = cx.reserve_entity::<Self>().entity_id();
985 let buffer_id = entity_id.as_non_zero_u64().into();
986 async move {
987 let text =
988 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
989 let mut syntax = SyntaxMap::new(&text).snapshot();
990 if let Some(language) = language.clone() {
991 let language_registry = language_registry.clone();
992 syntax.reparse(&text, language_registry, language);
993 }
994 BufferSnapshot {
995 text,
996 syntax,
997 file: None,
998 diagnostics: Default::default(),
999 remote_selections: Default::default(),
1000 language,
1001 non_text_state_update_count: 0,
1002 }
1003 }
1004 }
1005
1006 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1007 let entity_id = cx.reserve_entity::<Self>().entity_id();
1008 let buffer_id = entity_id.as_non_zero_u64().into();
1009 let text =
1010 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1011 let syntax = SyntaxMap::new(&text).snapshot();
1012 BufferSnapshot {
1013 text,
1014 syntax,
1015 file: None,
1016 diagnostics: Default::default(),
1017 remote_selections: Default::default(),
1018 language: None,
1019 non_text_state_update_count: 0,
1020 }
1021 }
1022
1023 #[cfg(any(test, feature = "test-support"))]
1024 pub fn build_snapshot_sync(
1025 text: Rope,
1026 language: Option<Arc<Language>>,
1027 language_registry: Option<Arc<LanguageRegistry>>,
1028 cx: &mut App,
1029 ) -> BufferSnapshot {
1030 let entity_id = cx.reserve_entity::<Self>().entity_id();
1031 let buffer_id = entity_id.as_non_zero_u64().into();
1032 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1033 let mut syntax = SyntaxMap::new(&text).snapshot();
1034 if let Some(language) = language.clone() {
1035 syntax.reparse(&text, language_registry, language);
1036 }
1037 BufferSnapshot {
1038 text,
1039 syntax,
1040 file: None,
1041 diagnostics: Default::default(),
1042 remote_selections: Default::default(),
1043 language,
1044 non_text_state_update_count: 0,
1045 }
1046 }
1047
1048 /// Retrieve a snapshot of the buffer's current state. This is computationally
1049 /// cheap, and allows reading from the buffer on a background thread.
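    ///
    /// A minimal sketch of reading from a snapshot on a background thread
    /// (illustrative only):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is a fixed state of the buffer; it can be inspected
    ///     // here without blocking the main thread.
    ///     let _len = snapshot.len();
    /// })
    /// .detach();
    /// ```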
1050 pub fn snapshot(&self) -> BufferSnapshot {
1051 let text = self.text.snapshot();
1052 let mut syntax_map = self.syntax_map.lock();
1053 syntax_map.interpolate(&text);
1054 let syntax = syntax_map.snapshot();
1055
1056 BufferSnapshot {
1057 text,
1058 syntax,
1059 file: self.file.clone(),
1060 remote_selections: self.remote_selections.clone(),
1061 diagnostics: self.diagnostics.clone(),
1062 language: self.language.clone(),
1063 non_text_state_update_count: self.non_text_state_update_count,
1064 }
1065 }
1066
1067 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1068 let this = cx.entity();
1069 cx.new(|cx| {
1070 let mut branch = Self {
1071 branch_state: Some(BufferBranchState {
1072 base_buffer: this.clone(),
1073 merged_operations: Default::default(),
1074 }),
1075 language: self.language.clone(),
1076 has_conflict: self.has_conflict,
1077 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1078 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1079 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1080 };
1081 if let Some(language_registry) = self.language_registry() {
1082 branch.set_language_registry(language_registry);
1083 }
1084
1085 // Reparse the branch buffer so that we get syntax highlighting immediately.
1086 branch.reparse(cx);
1087
1088 branch
1089 })
1090 }
1091
1092 pub fn preview_edits(
1093 &self,
1094 edits: Arc<[(Range<Anchor>, String)]>,
1095 cx: &App,
1096 ) -> Task<EditPreview> {
1097 let registry = self.language_registry();
1098 let language = self.language().cloned();
1099 let old_snapshot = self.text.snapshot();
1100 let mut branch_buffer = self.text.branch();
1101 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1102 cx.background_spawn(async move {
1103 if !edits.is_empty() {
1104 if let Some(language) = language.clone() {
1105 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1106 }
1107
1108 branch_buffer.edit(edits.iter().cloned());
1109 let snapshot = branch_buffer.snapshot();
1110 syntax_snapshot.interpolate(&snapshot);
1111
1112 if let Some(language) = language {
1113 syntax_snapshot.reparse(&snapshot, registry, language);
1114 }
1115 }
1116 EditPreview {
1117 old_snapshot,
1118 applied_edits_snapshot: branch_buffer.snapshot(),
1119 syntax_snapshot,
1120 }
1121 })
1122 }
1123
1124 /// Applies all of the changes in this buffer that intersect any of the
1125 /// given `ranges` to its base buffer.
1126 ///
1127 /// If `ranges` is empty, then all changes will be applied. This buffer must
1128 /// be a branch buffer to call this method.
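    ///
    /// A minimal sketch of branching, editing, and merging back (illustrative
    /// only):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // Apply every branch edit back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```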
1129 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1130 let Some(base_buffer) = self.base_buffer() else {
1131 debug_panic!("not a branch buffer");
1132 return;
1133 };
1134
1135 let mut ranges = if ranges.is_empty() {
1136 &[0..usize::MAX]
1137 } else {
1138 ranges.as_slice()
1139 }
1140 .iter()
1141 .peekable();
1142
1143 let mut edits = Vec::new();
1144 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1145 let mut is_included = false;
1146 while let Some(range) = ranges.peek() {
1147 if range.end < edit.new.start {
1148 ranges.next().unwrap();
1149 } else {
1150 if range.start <= edit.new.end {
1151 is_included = true;
1152 }
1153 break;
1154 }
1155 }
1156
1157 if is_included {
1158 edits.push((
1159 edit.old.clone(),
1160 self.text_for_range(edit.new.clone()).collect::<String>(),
1161 ));
1162 }
1163 }
1164
1165 let operation = base_buffer.update(cx, |base_buffer, cx| {
1166 // cx.emit(BufferEvent::DiffBaseChanged);
1167 base_buffer.edit(edits, None, cx)
1168 });
1169
1170 if let Some(operation) = operation
1171 && let Some(BufferBranchState {
1172 merged_operations, ..
1173 }) = &mut self.branch_state
1174 {
1175 merged_operations.push(operation);
1176 }
1177 }
1178
1179 fn on_base_buffer_event(
1180 &mut self,
1181 _: Entity<Buffer>,
1182 event: &BufferEvent,
1183 cx: &mut Context<Self>,
1184 ) {
1185 let BufferEvent::Operation { operation, .. } = event else {
1186 return;
1187 };
1188 let Some(BufferBranchState {
1189 merged_operations, ..
1190 }) = &mut self.branch_state
1191 else {
1192 return;
1193 };
1194
1195 let mut operation_to_undo = None;
1196 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1197 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1198 {
1199 merged_operations.remove(ix);
1200 operation_to_undo = Some(operation.timestamp);
1201 }
1202
1203 self.apply_ops([operation.clone()], cx);
1204
1205 if let Some(timestamp) = operation_to_undo {
1206 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1207 self.undo_operations(counts, cx);
1208 }
1209 }
1210
1211 #[cfg(test)]
1212 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1213 &self.text
1214 }
1215
1216 /// Retrieve a snapshot of the buffer's raw text, without any
1217 /// language-related state like the syntax tree or diagnostics.
1218 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1219 self.text.snapshot()
1220 }
1221
1222 /// The file associated with the buffer, if any.
1223 pub fn file(&self) -> Option<&Arc<dyn File>> {
1224 self.file.as_ref()
1225 }
1226
1227 /// The version of the buffer that was last saved or reloaded from disk.
1228 pub fn saved_version(&self) -> &clock::Global {
1229 &self.saved_version
1230 }
1231
1232 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1233 pub fn saved_mtime(&self) -> Option<MTime> {
1234 self.saved_mtime
1235 }
1236
1237 /// Assign a language to the buffer.
1238 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1239 self.non_text_state_update_count += 1;
1240 self.syntax_map.lock().clear(&self.text);
1241 self.language = language;
1242 self.was_changed();
1243 self.reparse(cx);
1244 cx.emit(BufferEvent::LanguageChanged);
1245 }
1246
1247 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1248 /// other languages if parts of the buffer are written in different languages.
1249 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1250 self.syntax_map
1251 .lock()
1252 .set_language_registry(language_registry);
1253 }
1254
1255 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1256 self.syntax_map.lock().language_registry()
1257 }
1258
1259 /// Assign the line ending type to the buffer.
1260 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1261 self.text.set_line_ending(line_ending);
1262
1263 let lamport_timestamp = self.text.lamport_clock.tick();
1264 self.send_operation(
1265 Operation::UpdateLineEnding {
1266 line_ending,
1267 lamport_timestamp,
1268 },
1269 true,
1270 cx,
1271 );
1272 }
1273
1274 /// Assign the buffer a new [`Capability`].
1275 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1276 if self.capability != capability {
1277 self.capability = capability;
1278 cx.emit(BufferEvent::CapabilityChanged)
1279 }
1280 }
1281
1282 /// This method is called to signal that the buffer has been saved.
1283 pub fn did_save(
1284 &mut self,
1285 version: clock::Global,
1286 mtime: Option<MTime>,
1287 cx: &mut Context<Self>,
1288 ) {
1289 self.saved_version = version;
1290 self.has_unsaved_edits
1291 .set((self.saved_version().clone(), false));
1292 self.has_conflict = false;
1293 self.saved_mtime = mtime;
1294 self.was_changed();
1295 cx.emit(BufferEvent::Saved);
1296 cx.notify();
1297 }
1298
1299 /// Reloads the contents of the buffer from disk.
1300 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1301 let (tx, rx) = futures::channel::oneshot::channel();
1302 let prev_version = self.text.version();
1303 self.reload_task = Some(cx.spawn(async move |this, cx| {
1304 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1305 let file = this.file.as_ref()?.as_local()?;
1306
1307 Some((file.disk_state().mtime(), file.load(cx)))
1308 })?
1309 else {
1310 return Ok(());
1311 };
1312
1313 let new_text = new_text.await?;
1314 let diff = this
1315 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1316 .await;
1317 this.update(cx, |this, cx| {
1318 if this.version() == diff.base_version {
1319 this.finalize_last_transaction();
1320 this.apply_diff(diff, cx);
1321 tx.send(this.finalize_last_transaction().cloned()).ok();
1322 this.has_conflict = false;
1323 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1324 } else {
1325 if !diff.edits.is_empty()
1326 || this
1327 .edits_since::<usize>(&diff.base_version)
1328 .next()
1329 .is_some()
1330 {
1331 this.has_conflict = true;
1332 }
1333
1334 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1335 }
1336
1337 this.reload_task.take();
1338 })
1339 }));
1340 rx
1341 }
1342
1343 /// This method is called to signal that the buffer has been reloaded.
1344 pub fn did_reload(
1345 &mut self,
1346 version: clock::Global,
1347 line_ending: LineEnding,
1348 mtime: Option<MTime>,
1349 cx: &mut Context<Self>,
1350 ) {
1351 self.saved_version = version;
1352 self.has_unsaved_edits
1353 .set((self.saved_version.clone(), false));
1354 self.text.set_line_ending(line_ending);
1355 self.saved_mtime = mtime;
1356 cx.emit(BufferEvent::Reloaded);
1357 cx.notify();
1358 }
1359
1360 /// Updates the [`File`] backing this buffer. This should be called when
1361 /// the file has changed or has been deleted.
1362 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1363 let was_dirty = self.is_dirty();
1364 let mut file_changed = false;
1365
1366 if let Some(old_file) = self.file.as_ref() {
1367 if new_file.path() != old_file.path() {
1368 file_changed = true;
1369 }
1370
1371 let old_state = old_file.disk_state();
1372 let new_state = new_file.disk_state();
1373 if old_state != new_state {
1374 file_changed = true;
1375 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1376 cx.emit(BufferEvent::ReloadNeeded)
1377 }
1378 }
1379 } else {
1380 file_changed = true;
1381 };
1382
1383 self.file = Some(new_file);
1384 if file_changed {
1385 self.was_changed();
1386 self.non_text_state_update_count += 1;
1387 if was_dirty != self.is_dirty() {
1388 cx.emit(BufferEvent::DirtyChanged);
1389 }
1390 cx.emit(BufferEvent::FileHandleChanged);
1391 cx.notify();
1392 }
1393 }
1394
1395 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1396 Some(self.branch_state.as_ref()?.base_buffer.clone())
1397 }
1398
1399 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1400 pub fn language(&self) -> Option<&Arc<Language>> {
1401 self.language.as_ref()
1402 }
1403
1404 /// Returns the [`Language`] at the given location.
1405 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1406 let offset = position.to_offset(self);
1407 let mut is_first = true;
1408 let start_anchor = self.anchor_before(offset);
1409 let end_anchor = self.anchor_after(offset);
1410 self.syntax_map
1411 .lock()
1412 .layers_for_range(offset..offset, &self.text, false)
1413 .filter(|layer| {
1414 if is_first {
1415 is_first = false;
1416 return true;
1417 }
1418
1419 layer
1420 .included_sub_ranges
1421 .map(|sub_ranges| {
1422 sub_ranges.iter().any(|sub_range| {
1423 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1424 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1425 !is_before_start && !is_after_end
1426 })
1427 })
1428 .unwrap_or(true)
1429 })
1430 .last()
1431 .map(|info| info.language.clone())
1432 .or_else(|| self.language.clone())
1433 }
1434
1435 /// Returns each [`Language`] for the active syntax layers at the given location.
1436 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1437 let offset = position.to_offset(self);
1438 let mut languages: Vec<Arc<Language>> = self
1439 .syntax_map
1440 .lock()
1441 .layers_for_range(offset..offset, &self.text, false)
1442 .map(|info| info.language.clone())
1443 .collect();
1444
1445 if languages.is_empty()
1446 && let Some(buffer_language) = self.language()
1447 {
1448 languages.push(buffer_language.clone());
1449 }
1450
1451 languages
1452 }
1453
1454 /// An integer version number that accounts for all updates besides
1455 /// the buffer's text itself (which is versioned via a version vector).
1456 pub fn non_text_state_update_count(&self) -> usize {
1457 self.non_text_state_update_count
1458 }
1459
1460 /// Whether the buffer is being parsed in the background.
1461 #[cfg(any(test, feature = "test-support"))]
1462 pub fn is_parsing(&self) -> bool {
1463 self.reparse.is_some()
1464 }
1465
1466 /// Indicates whether the buffer contains any regions that may be
1467 /// written in a language that hasn't been loaded yet.
1468 pub fn contains_unknown_injections(&self) -> bool {
1469 self.syntax_map.lock().contains_unknown_injections()
1470 }
1471
1472 #[cfg(any(test, feature = "test-support"))]
1473 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1474 self.sync_parse_timeout = timeout;
1475 }
1476
1477 /// Called after an edit to synchronize the buffer's main parse tree with
1478 /// the buffer's new underlying state.
1479 ///
1480 /// Locks the syntax map and interpolates the edits since the last reparse
1481 /// into the foreground syntax tree.
1482 ///
1483 /// Then takes a stable snapshot of the syntax map before unlocking it.
1484 /// The snapshot with the interpolated edits is sent to a background thread,
1485 /// where we ask Tree-sitter to perform an incremental parse.
1486 ///
1487 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1488 /// waiting for the parse to complete. If it finishes within that window, we
1489 /// proceed synchronously.
1490 ///
1491 /// If we time out waiting on the parse, we spawn a second task that waits
1492 /// until the parse does complete, and we return with the interpolated tree
1493 /// still in the foreground. When the background parse completes, it calls back
1494 /// into the main thread and assigns the result to the foreground parse state.
1495 ///
1496 /// If the buffer or grammar changed since the start of the background parse,
1497 /// we initiate an additional reparse recursively. To avoid concurrent parses
1498 /// for the same buffer, we only initiate a new parse if we are not already
1499 /// parsing in the background.
1500 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1501 if self.reparse.is_some() {
1502 return;
1503 }
1504 let language = if let Some(language) = self.language.clone() {
1505 language
1506 } else {
1507 return;
1508 };
1509
1510 let text = self.text_snapshot();
1511 let parsed_version = self.version();
1512
1513 let mut syntax_map = self.syntax_map.lock();
1514 syntax_map.interpolate(&text);
1515 let language_registry = syntax_map.language_registry();
1516 let mut syntax_snapshot = syntax_map.snapshot();
1517 drop(syntax_map);
1518
1519 let parse_task = cx.background_spawn({
1520 let language = language.clone();
1521 let language_registry = language_registry.clone();
1522 async move {
1523 syntax_snapshot.reparse(&text, language_registry, language);
1524 syntax_snapshot
1525 }
1526 });
1527
1528 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1529 match cx
1530 .background_executor()
1531 .block_with_timeout(self.sync_parse_timeout, parse_task)
1532 {
1533 Ok(new_syntax_snapshot) => {
1534 self.did_finish_parsing(new_syntax_snapshot, cx);
1535 self.reparse = None;
1536 }
1537 Err(parse_task) => {
1538 self.reparse = Some(cx.spawn(async move |this, cx| {
1539 let new_syntax_map = parse_task.await;
1540 this.update(cx, move |this, cx| {
1541 let grammar_changed =
1542 this.language.as_ref().is_none_or(|current_language| {
1543 !Arc::ptr_eq(&language, current_language)
1544 });
1545 let language_registry_changed = new_syntax_map
1546 .contains_unknown_injections()
1547 && language_registry.is_some_and(|registry| {
1548 registry.version() != new_syntax_map.language_registry_version()
1549 });
1550 let parse_again = language_registry_changed
1551 || grammar_changed
1552 || this.version.changed_since(&parsed_version);
1553 this.did_finish_parsing(new_syntax_map, cx);
1554 this.reparse = None;
1555 if parse_again {
1556 this.reparse(cx);
1557 }
1558 })
1559 .ok();
1560 }));
1561 }
1562 }
1563 }
1564
1565 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1566 self.was_changed();
1567 self.non_text_state_update_count += 1;
1568 self.syntax_map.lock().did_parse(syntax_snapshot);
1569 self.request_autoindent(cx);
1570 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1571 cx.emit(BufferEvent::Reparsed);
1572 cx.notify();
1573 }
1574
1575 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1576 self.parse_status.1.clone()
1577 }
1578
1579 /// Assign to the buffer a set of diagnostics created by a given language server.
1580 pub fn update_diagnostics(
1581 &mut self,
1582 server_id: LanguageServerId,
1583 diagnostics: DiagnosticSet,
1584 cx: &mut Context<Self>,
1585 ) {
1586 let lamport_timestamp = self.text.lamport_clock.tick();
1587 let op = Operation::UpdateDiagnostics {
1588 server_id,
1589 diagnostics: diagnostics.iter().cloned().collect(),
1590 lamport_timestamp,
1591 };
1592
1593 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1594 self.send_operation(op, true, cx);
1595 }
1596
1597 pub fn buffer_diagnostics(
1598 &self,
1599 for_server: Option<LanguageServerId>,
1600 ) -> Vec<&DiagnosticEntry<Anchor>> {
1601 match for_server {
1602 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1603 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1604 Err(_) => Vec::new(),
1605 },
1606 None => self
1607 .diagnostics
1608 .iter()
1609 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1610 .collect(),
1611 }
1612 }
1613
1614 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1615 if let Some(indent_sizes) = self.compute_autoindents() {
1616 let indent_sizes = cx.background_spawn(indent_sizes);
1617 match cx
1618 .background_executor()
1619 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1620 {
1621 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1622 Err(indent_sizes) => {
1623 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1624 let indent_sizes = indent_sizes.await;
1625 this.update(cx, |this, cx| {
1626 this.apply_autoindents(indent_sizes, cx);
1627 })
1628 .ok();
1629 }));
1630 }
1631 }
1632 } else {
1633 self.autoindent_requests.clear();
1634 for tx in self.wait_for_autoindent_txs.drain(..) {
1635 tx.send(()).ok();
1636 }
1637 }
1638 }
1639
1640 fn compute_autoindents(
1641 &self,
1642 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1643 let max_rows_between_yields = 100;
1644 let snapshot = self.snapshot();
1645 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1646 return None;
1647 }
1648
1649 let autoindent_requests = self.autoindent_requests.clone();
1650 Some(async move {
1651 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1652 for request in autoindent_requests {
1653 // Resolve each edited range to its row in the current buffer and in the
1654 // buffer before this batch of edits.
1655 let mut row_ranges = Vec::new();
1656 let mut old_to_new_rows = BTreeMap::new();
1657 let mut language_indent_sizes_by_new_row = Vec::new();
1658 for entry in &request.entries {
1659 let position = entry.range.start;
1660 let new_row = position.to_point(&snapshot).row;
1661 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1662 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1663
1664 if !entry.first_line_is_new {
1665 let old_row = position.to_point(&request.before_edit).row;
1666 old_to_new_rows.insert(old_row, new_row);
1667 }
1668 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1669 }
1670
1671 // Build a map containing the suggested indentation for each of the edited lines
1672 // with respect to the state of the buffer before these edits. This map is keyed
1673 // by the rows for these lines in the current state of the buffer.
1674 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1675 let old_edited_ranges =
1676 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1677 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1678 let mut language_indent_size = IndentSize::default();
1679 for old_edited_range in old_edited_ranges {
1680 let suggestions = request
1681 .before_edit
1682 .suggest_autoindents(old_edited_range.clone())
1683 .into_iter()
1684 .flatten();
1685 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1686 if let Some(suggestion) = suggestion {
1687 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1688
1689 // Find the indent size based on the language for this row.
1690 while let Some((row, size)) = language_indent_sizes.peek() {
1691 if *row > new_row {
1692 break;
1693 }
1694 language_indent_size = *size;
1695 language_indent_sizes.next();
1696 }
1697
1698 let suggested_indent = old_to_new_rows
1699 .get(&suggestion.basis_row)
1700 .and_then(|from_row| {
1701 Some(old_suggestions.get(from_row).copied()?.0)
1702 })
1703 .unwrap_or_else(|| {
1704 request
1705 .before_edit
1706 .indent_size_for_line(suggestion.basis_row)
1707 })
1708 .with_delta(suggestion.delta, language_indent_size);
1709 old_suggestions
1710 .insert(new_row, (suggested_indent, suggestion.within_error));
1711 }
1712 }
1713 yield_now().await;
1714 }
1715
1716 // Compute new suggestions for each line, but only include them in the result
1717 // if they differ from the old suggestion for that line.
1718 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1719 let mut language_indent_size = IndentSize::default();
1720 for (row_range, original_indent_column) in row_ranges {
1721 let new_edited_row_range = if request.is_block_mode {
1722 row_range.start..row_range.start + 1
1723 } else {
1724 row_range.clone()
1725 };
1726
1727 let suggestions = snapshot
1728 .suggest_autoindents(new_edited_row_range.clone())
1729 .into_iter()
1730 .flatten();
1731 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1732 if let Some(suggestion) = suggestion {
1733 // Find the indent size based on the language for this row.
1734 while let Some((row, size)) = language_indent_sizes.peek() {
1735 if *row > new_row {
1736 break;
1737 }
1738 language_indent_size = *size;
1739 language_indent_sizes.next();
1740 }
1741
1742 let suggested_indent = indent_sizes
1743 .get(&suggestion.basis_row)
1744 .copied()
1745 .map(|e| e.0)
1746 .unwrap_or_else(|| {
1747 snapshot.indent_size_for_line(suggestion.basis_row)
1748 })
1749 .with_delta(suggestion.delta, language_indent_size);
1750
1751 if old_suggestions.get(&new_row).is_none_or(
1752 |(old_indentation, was_within_error)| {
1753 suggested_indent != *old_indentation
1754 && (!suggestion.within_error || *was_within_error)
1755 },
1756 ) {
1757 indent_sizes.insert(
1758 new_row,
1759 (suggested_indent, request.ignore_empty_lines),
1760 );
1761 }
1762 }
1763 }
1764
1765 if let (true, Some(original_indent_column)) =
1766 (request.is_block_mode, original_indent_column)
1767 {
1768 let new_indent =
1769 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1770 *indent
1771 } else {
1772 snapshot.indent_size_for_line(row_range.start)
1773 };
1774 let delta = new_indent.len as i64 - original_indent_column as i64;
1775 if delta != 0 {
1776 for row in row_range.skip(1) {
1777 indent_sizes.entry(row).or_insert_with(|| {
1778 let mut size = snapshot.indent_size_for_line(row);
1779 if size.kind == new_indent.kind {
1780 match delta.cmp(&0) {
1781 Ordering::Greater => size.len += delta as u32,
1782 Ordering::Less => {
1783 size.len = size.len.saturating_sub(-delta as u32)
1784 }
1785 Ordering::Equal => {}
1786 }
1787 }
1788 (size, request.ignore_empty_lines)
1789 });
1790 }
1791 }
1792 }
1793
1794 yield_now().await;
1795 }
1796 }
1797
1798 indent_sizes
1799 .into_iter()
1800 .filter_map(|(row, (indent, ignore_empty_lines))| {
1801 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1802 None
1803 } else {
1804 Some((row, indent))
1805 }
1806 })
1807 .collect()
1808 })
1809 }
1810
1811 fn apply_autoindents(
1812 &mut self,
1813 indent_sizes: BTreeMap<u32, IndentSize>,
1814 cx: &mut Context<Self>,
1815 ) {
1816 self.autoindent_requests.clear();
1817 for tx in self.wait_for_autoindent_txs.drain(..) {
1818 tx.send(()).ok();
1819 }
1820
1821 let edits: Vec<_> = indent_sizes
1822 .into_iter()
1823 .filter_map(|(row, indent_size)| {
1824 let current_size = indent_size_for_line(self, row);
1825 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1826 })
1827 .collect();
1828
1829 let preserve_preview = self.preserve_preview();
1830 self.edit(edits, None, cx);
1831 if preserve_preview {
1832 self.refresh_preview();
1833 }
1834 }
1835
1836 /// Create a minimal edit that will cause the given row to be indented
1837 /// with the given size. After applying this edit, the length of the line
1838 /// will always be at least `new_size.len`.
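    ///
    /// A minimal sketch of the expected output (not compiled as a doc test; it assumes
    /// `IndentSize` and `Point` are in scope):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```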
1839 pub fn edit_for_indent_size_adjustment(
1840 row: u32,
1841 current_size: IndentSize,
1842 new_size: IndentSize,
1843 ) -> Option<(Range<Point>, String)> {
1844 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1846 Ordering::Greater => {
1847 let point = Point::new(row, 0);
1848 Some((
1849 point..point,
1850 iter::repeat(new_size.char())
1851 .take((new_size.len - current_size.len) as usize)
1852 .collect::<String>(),
1853 ))
1854 }
1855
1856 Ordering::Less => Some((
1857 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1858 String::new(),
1859 )),
1860
1861 Ordering::Equal => None,
1862 }
1863 } else {
1864 Some((
1865 Point::new(row, 0)..Point::new(row, current_size.len),
1866 iter::repeat(new_size.char())
1867 .take(new_size.len as usize)
1868 .collect::<String>(),
1869 ))
1870 }
1871 }
1872
1873 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1874 /// and the given new text.
1875 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1876 let old_text = self.as_rope().clone();
1877 let base_version = self.version();
1878 cx.background_executor()
1879 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1880 let old_text = old_text.to_string();
1881 let line_ending = LineEnding::detect(&new_text);
1882 LineEnding::normalize(&mut new_text);
1883 let edits = text_diff(&old_text, &new_text);
1884 Diff {
1885 base_version,
1886 line_ending,
1887 edits,
1888 }
1889 })
1890 }
1891
1892 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1894 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1895 let old_text = self.as_rope().clone();
1896 let line_ending = self.line_ending();
1897 let base_version = self.version();
1898 cx.background_spawn(async move {
1899 let ranges = trailing_whitespace_ranges(&old_text);
1900 let empty = Arc::<str>::from("");
1901 Diff {
1902 base_version,
1903 line_ending,
1904 edits: ranges
1905 .into_iter()
1906 .map(|range| (range, empty.clone()))
1907 .collect(),
1908 }
1909 })
1910 }
1911
1912 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
1914 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1915 let len = self.len();
1916 if len == 0 {
1917 return;
1918 }
1919 let mut offset = len;
1920 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1921 let non_whitespace_len = chunk
1922 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1923 .len();
1924 offset -= chunk.len();
1925 offset += non_whitespace_len;
1926 if non_whitespace_len != 0 {
1927 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1928 return;
1929 }
1930 break;
1931 }
1932 }
1933 self.edit([(offset..len, "\n")], None, cx);
1934 }
1935
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
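    ///
    /// A rough sketch of the intended flow (illustrative only; assumes an async gpui
    /// context `cx`, a `buffer: Entity<Buffer>`, and the new contents in `new_text`):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread, then apply it back to the buffer.
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```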
1939 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1940 let snapshot = self.snapshot();
1941 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1942 let mut delta = 0;
1943 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1944 while let Some(edit_since) = edits_since.peek() {
1945 // If the edit occurs after a diff hunk, then it does not
1946 // affect that hunk.
1947 if edit_since.old.start > range.end {
1948 break;
1949 }
1950 // If the edit precedes the diff hunk, then adjust the hunk
1951 // to reflect the edit.
1952 else if edit_since.old.end < range.start {
1953 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1954 edits_since.next();
1955 }
1956 // If the edit intersects a diff hunk, then discard that hunk.
1957 else {
1958 return None;
1959 }
1960 }
1961
1962 let start = (range.start as i64 + delta) as usize;
1963 let end = (range.end as i64 + delta) as usize;
1964 Some((start..end, new_text))
1965 });
1966
1967 self.start_transaction();
1968 self.text.set_line_ending(diff.line_ending);
1969 self.edit(adjusted_edits, None, cx);
1970 self.end_transaction(cx)
1971 }
1972
1973 fn has_unsaved_edits(&self) -> bool {
1974 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1975
1976 if last_version == self.version {
1977 self.has_unsaved_edits
1978 .set((last_version, has_unsaved_edits));
1979 return has_unsaved_edits;
1980 }
1981
1982 let has_edits = self.has_edits_since(&self.saved_version);
1983 self.has_unsaved_edits
1984 .set((self.version.clone(), has_edits));
1985 has_edits
1986 }
1987
1988 /// Checks if the buffer has unsaved changes.
1989 pub fn is_dirty(&self) -> bool {
1990 if self.capability == Capability::ReadOnly {
1991 return false;
1992 }
1993 if self.has_conflict {
1994 return true;
1995 }
1996 match self.file.as_ref().map(|f| f.disk_state()) {
1997 Some(DiskState::New) | Some(DiskState::Deleted) => {
1998 !self.is_empty() && self.has_unsaved_edits()
1999 }
2000 _ => self.has_unsaved_edits(),
2001 }
2002 }
2003
2004 /// Checks if the buffer and its file have both changed since the buffer
2005 /// was last saved or reloaded.
2006 pub fn has_conflict(&self) -> bool {
2007 if self.has_conflict {
2008 return true;
2009 }
2010 let Some(file) = self.file.as_ref() else {
2011 return false;
2012 };
2013 match file.disk_state() {
2014 DiskState::New => false,
2015 DiskState::Present { mtime } => match self.saved_mtime {
2016 Some(saved_mtime) => {
2017 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2018 }
2019 None => true,
2020 },
2021 DiskState::Deleted => false,
2022 }
2023 }
2024
2025 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2026 pub fn subscribe(&mut self) -> Subscription {
2027 self.text.subscribe()
2028 }
2029
2030 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2031 ///
2032 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
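    ///
    /// A minimal sketch (not a doc test; assumes `std::rc::Rc` and `std::cell::Cell`
    /// are in scope):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits have been applied...
    /// if changed.get() {
    ///     // React to the change without waiting for an effect cycle.
    /// }
    /// ```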
2034 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2035 if let Err(ix) = self
2036 .change_bits
2037 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2038 {
2039 self.change_bits.insert(ix, bit);
2040 }
2041 }
2042
2043 fn was_changed(&mut self) {
2044 self.change_bits.retain(|change_bit| {
2045 change_bit.upgrade().is_some_and(|bit| {
2046 bit.replace(true);
2047 true
2048 })
2049 });
2050 }
2051
2052 /// Starts a transaction, if one is not already in-progress. When undoing or
2053 /// redoing edits, all of the edits performed within a transaction are undone
2054 /// or redone together.
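    ///
    /// A minimal sketch of grouping two edits (assumes `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// ```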
2055 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2056 self.start_transaction_at(Instant::now())
2057 }
2058
2059 /// Starts a transaction, providing the current time. Subsequent transactions
2060 /// that occur within a short period of time will be grouped together. This
2061 /// is controlled by the buffer's undo grouping duration.
2062 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2063 self.transaction_depth += 1;
2064 if self.was_dirty_before_starting_transaction.is_none() {
2065 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2066 }
2067 self.text.start_transaction_at(now)
2068 }
2069
2070 /// Terminates the current transaction, if this is the outermost transaction.
2071 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2072 self.end_transaction_at(Instant::now(), cx)
2073 }
2074
2075 /// Terminates the current transaction, providing the current time. Subsequent transactions
2076 /// that occur within a short period of time will be grouped together. This
2077 /// is controlled by the buffer's undo grouping duration.
2078 pub fn end_transaction_at(
2079 &mut self,
2080 now: Instant,
2081 cx: &mut Context<Self>,
2082 ) -> Option<TransactionId> {
2083 assert!(self.transaction_depth > 0);
2084 self.transaction_depth -= 1;
2085 let was_dirty = if self.transaction_depth == 0 {
2086 self.was_dirty_before_starting_transaction.take().unwrap()
2087 } else {
2088 false
2089 };
2090 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2091 self.did_edit(&start_version, was_dirty, cx);
2092 Some(transaction_id)
2093 } else {
2094 None
2095 }
2096 }
2097
2098 /// Manually add a transaction to the buffer's undo history.
2099 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2100 self.text.push_transaction(transaction, now);
2101 }
2102
2103 /// Differs from `push_transaction` in that it does not clear the redo
2104 /// stack. Intended to be used to create a parent transaction to merge
2105 /// potential child transactions into.
2106 ///
2107 /// The caller is responsible for removing it from the undo history using
2108 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2109 /// are merged into this transaction, the caller is responsible for ensuring
2110 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2111 /// cleared is to create transactions with the usual `start_transaction` and
2112 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2114 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2115 self.text.push_empty_transaction(now)
2116 }
2117
2118 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2120 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2121 self.text.finalize_last_transaction()
2122 }
2123
2124 /// Manually group all changes since a given transaction.
2125 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2126 self.text.group_until_transaction(transaction_id);
2127 }
2128
2129 /// Manually remove a transaction from the buffer's undo history
2130 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2131 self.text.forget_transaction(transaction_id)
2132 }
2133
2134 /// Retrieve a transaction from the buffer's undo history
2135 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2136 self.text.get_transaction(transaction_id)
2137 }
2138
2139 /// Manually merge two transactions in the buffer's undo history.
2140 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2141 self.text.merge_transactions(transaction, destination);
2142 }
2143
2144 /// Waits for the buffer to receive operations with the given timestamps.
2145 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2146 &mut self,
2147 edit_ids: It,
2148 ) -> impl Future<Output = Result<()>> + use<It> {
2149 self.text.wait_for_edits(edit_ids)
2150 }
2151
2152 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2153 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2154 &mut self,
2155 anchors: It,
2156 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2157 self.text.wait_for_anchors(anchors)
2158 }
2159
2160 /// Waits for the buffer to receive operations up to the given version.
2161 pub fn wait_for_version(
2162 &mut self,
2163 version: clock::Global,
2164 ) -> impl Future<Output = Result<()>> + use<> {
2165 self.text.wait_for_version(version)
2166 }
2167
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2170 pub fn give_up_waiting(&mut self) {
2171 self.text.give_up_waiting();
2172 }
2173
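    /// Returns a receiver that resolves once the pending autoindent requests have been
    /// applied (or discarded), or `None` if there are none pending.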
2174 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2175 let mut rx = None;
2176 if !self.autoindent_requests.is_empty() {
2177 let channel = oneshot::channel();
2178 self.wait_for_autoindent_txs.push(channel.0);
2179 rx = Some(channel.1);
2180 }
2181 rx
2182 }
2183
2184 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2185 pub fn set_active_selections(
2186 &mut self,
2187 selections: Arc<[Selection<Anchor>]>,
2188 line_mode: bool,
2189 cursor_shape: CursorShape,
2190 cx: &mut Context<Self>,
2191 ) {
2192 let lamport_timestamp = self.text.lamport_clock.tick();
2193 self.remote_selections.insert(
2194 self.text.replica_id(),
2195 SelectionSet {
2196 selections: selections.clone(),
2197 lamport_timestamp,
2198 line_mode,
2199 cursor_shape,
2200 },
2201 );
2202 self.send_operation(
2203 Operation::UpdateSelections {
2204 selections,
2205 line_mode,
2206 lamport_timestamp,
2207 cursor_shape,
2208 },
2209 true,
2210 cx,
2211 );
2212 self.non_text_state_update_count += 1;
2213 cx.notify();
2214 }
2215
2216 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2217 /// this replica.
2218 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2219 if self
2220 .remote_selections
2221 .get(&self.text.replica_id())
2222 .is_none_or(|set| !set.selections.is_empty())
2223 {
2224 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2225 }
2226 }
2227
2228 pub fn set_agent_selections(
2229 &mut self,
2230 selections: Arc<[Selection<Anchor>]>,
2231 line_mode: bool,
2232 cursor_shape: CursorShape,
2233 cx: &mut Context<Self>,
2234 ) {
2235 let lamport_timestamp = self.text.lamport_clock.tick();
2236 self.remote_selections.insert(
2237 AGENT_REPLICA_ID,
2238 SelectionSet {
2239 selections,
2240 lamport_timestamp,
2241 line_mode,
2242 cursor_shape,
2243 },
2244 );
2245 self.non_text_state_update_count += 1;
2246 cx.notify();
2247 }
2248
2249 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2250 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2251 }
2252
2253 /// Replaces the buffer's entire text.
2254 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2255 where
2256 T: Into<Arc<str>>,
2257 {
2258 self.autoindent_requests.clear();
2259 self.edit([(0..self.len(), text)], None, cx)
2260 }
2261
2262 /// Appends the given text to the end of the buffer.
2263 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2264 where
2265 T: Into<Arc<str>>,
2266 {
2267 self.edit([(self.len()..self.len(), text)], None, cx)
2268 }
2269
2270 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2271 /// delete, and a string of text to insert at that location.
2272 ///
2273 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2274 /// request for the edited ranges, which will be processed when the buffer finishes
2275 /// parsing.
2276 ///
2277 /// Parsing takes place at the end of a transaction, and may compute synchronously
2278 /// or asynchronously, depending on the changes.
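    ///
    /// A minimal sketch (assumes `cx: &mut Context<Buffer>`; ranges may be byte offsets,
    /// [`Point`]s, or anything else implementing [`ToOffset`]):
    ///
    /// ```ignore
    /// // Replace bytes 4..9 with "world", without auto-indentation.
    /// buffer.edit([(4..9, "world")], None, cx);
    /// // Insert a new line at the start of row 1 and auto-indent it.
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 0), "let x = 1;\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```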
2279 pub fn edit<I, S, T>(
2280 &mut self,
2281 edits_iter: I,
2282 autoindent_mode: Option<AutoindentMode>,
2283 cx: &mut Context<Self>,
2284 ) -> Option<clock::Lamport>
2285 where
2286 I: IntoIterator<Item = (Range<S>, T)>,
2287 S: ToOffset,
2288 T: Into<Arc<str>>,
2289 {
2290 // Skip invalid edits and coalesce contiguous ones.
2291 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2292
2293 for (range, new_text) in edits_iter {
2294 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2295
2296 if range.start > range.end {
2297 mem::swap(&mut range.start, &mut range.end);
2298 }
2299 let new_text = new_text.into();
2300 if !new_text.is_empty() || !range.is_empty() {
2301 if let Some((prev_range, prev_text)) = edits.last_mut()
2302 && prev_range.end >= range.start
2303 {
2304 prev_range.end = cmp::max(prev_range.end, range.end);
2305 *prev_text = format!("{prev_text}{new_text}").into();
2306 } else {
2307 edits.push((range, new_text));
2308 }
2309 }
2310 }
2311 if edits.is_empty() {
2312 return None;
2313 }
2314
2315 self.start_transaction();
2316 self.pending_autoindent.take();
2317 let autoindent_request = autoindent_mode
2318 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2319
2320 let edit_operation = self.text.edit(edits.iter().cloned());
2321 let edit_id = edit_operation.timestamp();
2322
2323 if let Some((before_edit, mode)) = autoindent_request {
2324 let mut delta = 0isize;
2325 let mut previous_setting = None;
2326 let entries: Vec<_> = edits
2327 .into_iter()
2328 .enumerate()
2329 .zip(&edit_operation.as_edit().unwrap().new_text)
2330 .filter(|((_, (range, _)), _)| {
2331 let language = before_edit.language_at(range.start);
2332 let language_id = language.map(|l| l.id());
2333 if let Some((cached_language_id, auto_indent)) = previous_setting
2334 && cached_language_id == language_id
2335 {
2336 auto_indent
2337 } else {
2338 // The auto-indent setting is not present in editorconfigs, hence
2339 // we can avoid passing the file here.
2340 let auto_indent =
2341 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2342 previous_setting = Some((language_id, auto_indent));
2343 auto_indent
2344 }
2345 })
2346 .map(|((ix, (range, _)), new_text)| {
2347 let new_text_length = new_text.len();
2348 let old_start = range.start.to_point(&before_edit);
2349 let new_start = (delta + range.start as isize) as usize;
2350 let range_len = range.end - range.start;
2351 delta += new_text_length as isize - range_len as isize;
2352
2353 // Decide what range of the insertion to auto-indent, and whether
2354 // the first line of the insertion should be considered a newly-inserted line
2355 // or an edit to an existing line.
2356 let mut range_of_insertion_to_indent = 0..new_text_length;
2357 let mut first_line_is_new = true;
2358
2359 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2360 let old_line_end = before_edit.line_len(old_start.row);
2361
2362 if old_start.column > old_line_start {
2363 first_line_is_new = false;
2364 }
2365
2366 if !new_text.contains('\n')
2367 && (old_start.column + (range_len as u32) < old_line_end
2368 || old_line_end == old_line_start)
2369 {
2370 first_line_is_new = false;
2371 }
2372
2373 // When inserting text starting with a newline, avoid auto-indenting the
2374 // previous line.
2375 if new_text.starts_with('\n') {
2376 range_of_insertion_to_indent.start += 1;
2377 first_line_is_new = true;
2378 }
2379
2380 let mut original_indent_column = None;
2381 if let AutoindentMode::Block {
2382 original_indent_columns,
2383 } = &mode
2384 {
2385 original_indent_column = Some(if new_text.starts_with('\n') {
2386 indent_size_for_text(
2387 new_text[range_of_insertion_to_indent.clone()].chars(),
2388 )
2389 .len
2390 } else {
2391 original_indent_columns
2392 .get(ix)
2393 .copied()
2394 .flatten()
2395 .unwrap_or_else(|| {
2396 indent_size_for_text(
2397 new_text[range_of_insertion_to_indent.clone()].chars(),
2398 )
2399 .len
2400 })
2401 });
2402
2403 // Avoid auto-indenting the line after the edit.
2404 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2405 range_of_insertion_to_indent.end -= 1;
2406 }
2407 }
2408
2409 AutoindentRequestEntry {
2410 first_line_is_new,
2411 original_indent_column,
2412 indent_size: before_edit.language_indent_size_at(range.start, cx),
2413 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2414 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2415 }
2416 })
2417 .collect();
2418
2419 if !entries.is_empty() {
2420 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2421 before_edit,
2422 entries,
2423 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2424 ignore_empty_lines: false,
2425 }));
2426 }
2427 }
2428
2429 self.end_transaction(cx);
2430 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2431 Some(edit_id)
2432 }
2433
2434 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2435 self.was_changed();
2436
2437 if self.edits_since::<usize>(old_version).next().is_none() {
2438 return;
2439 }
2440
2441 self.reparse(cx);
2442 cx.emit(BufferEvent::Edited);
2443 if was_dirty != self.is_dirty() {
2444 cx.emit(BufferEvent::DirtyChanged);
2445 }
2446 cx.notify();
2447 }
2448
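    /// Enqueues an autoindent request for the given ranges (leaving empty lines untouched)
    /// and kicks off the autoindent computation.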
2449 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2450 where
2451 I: IntoIterator<Item = Range<T>>,
2452 T: ToOffset + Copy,
2453 {
2454 let before_edit = self.snapshot();
2455 let entries = ranges
2456 .into_iter()
2457 .map(|range| AutoindentRequestEntry {
2458 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2459 first_line_is_new: true,
2460 indent_size: before_edit.language_indent_size_at(range.start, cx),
2461 original_indent_column: None,
2462 })
2463 .collect();
2464 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2465 before_edit,
2466 entries,
2467 is_block_mode: false,
2468 ignore_empty_lines: true,
2469 }));
2470 self.request_autoindent(cx);
2471 }
2472
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2475 pub fn insert_empty_line(
2476 &mut self,
2477 position: impl ToPoint,
2478 space_above: bool,
2479 space_below: bool,
2480 cx: &mut Context<Self>,
2481 ) -> Point {
2482 let mut position = position.to_point(self);
2483
2484 self.start_transaction();
2485
2486 self.edit(
2487 [(position..position, "\n")],
2488 Some(AutoindentMode::EachLine),
2489 cx,
2490 );
2491
2492 if position.column > 0 {
2493 position += Point::new(1, 0);
2494 }
2495
2496 if !self.is_line_blank(position.row) {
2497 self.edit(
2498 [(position..position, "\n")],
2499 Some(AutoindentMode::EachLine),
2500 cx,
2501 );
2502 }
2503
2504 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2505 self.edit(
2506 [(position..position, "\n")],
2507 Some(AutoindentMode::EachLine),
2508 cx,
2509 );
2510 position.row += 1;
2511 }
2512
2513 if space_below
2514 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2515 {
2516 self.edit(
2517 [(position..position, "\n")],
2518 Some(AutoindentMode::EachLine),
2519 cx,
2520 );
2521 }
2522
2523 self.end_transaction(cx);
2524
2525 position
2526 }
2527
2528 /// Applies the given remote operations to the buffer.
2529 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2530 self.pending_autoindent.take();
2531 let was_dirty = self.is_dirty();
2532 let old_version = self.version.clone();
2533 let mut deferred_ops = Vec::new();
2534 let buffer_ops = ops
2535 .into_iter()
2536 .filter_map(|op| match op {
2537 Operation::Buffer(op) => Some(op),
2538 _ => {
2539 if self.can_apply_op(&op) {
2540 self.apply_op(op, cx);
2541 } else {
2542 deferred_ops.push(op);
2543 }
2544 None
2545 }
2546 })
2547 .collect::<Vec<_>>();
2548 for operation in buffer_ops.iter() {
2549 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2550 }
2551 self.text.apply_ops(buffer_ops);
2552 self.deferred_ops.insert(deferred_ops);
2553 self.flush_deferred_ops(cx);
2554 self.did_edit(&old_version, was_dirty, cx);
2555 // Notify independently of whether the buffer was edited as the operations could include a
2556 // selection update.
2557 cx.notify();
2558 }
2559
2560 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2561 let mut deferred_ops = Vec::new();
2562 for op in self.deferred_ops.drain().iter().cloned() {
2563 if self.can_apply_op(&op) {
2564 self.apply_op(op, cx);
2565 } else {
2566 deferred_ops.push(op);
2567 }
2568 }
2569 self.deferred_ops.insert(deferred_ops);
2570 }
2571
2572 pub fn has_deferred_ops(&self) -> bool {
2573 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2574 }
2575
2576 fn can_apply_op(&self, operation: &Operation) -> bool {
2577 match operation {
2578 Operation::Buffer(_) => {
2579 unreachable!("buffer operations should never be applied at this layer")
2580 }
2581 Operation::UpdateDiagnostics {
2582 diagnostics: diagnostic_set,
2583 ..
2584 } => diagnostic_set.iter().all(|diagnostic| {
2585 self.text.can_resolve(&diagnostic.range.start)
2586 && self.text.can_resolve(&diagnostic.range.end)
2587 }),
2588 Operation::UpdateSelections { selections, .. } => selections
2589 .iter()
2590 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2591 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2592 }
2593 }
2594
2595 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2596 match operation {
2597 Operation::Buffer(_) => {
2598 unreachable!("buffer operations should never be applied at this layer")
2599 }
2600 Operation::UpdateDiagnostics {
2601 server_id,
2602 diagnostics: diagnostic_set,
2603 lamport_timestamp,
2604 } => {
2605 let snapshot = self.snapshot();
2606 self.apply_diagnostic_update(
2607 server_id,
2608 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2609 lamport_timestamp,
2610 cx,
2611 );
2612 }
2613 Operation::UpdateSelections {
2614 selections,
2615 lamport_timestamp,
2616 line_mode,
2617 cursor_shape,
2618 } => {
2619 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2620 && set.lamport_timestamp > lamport_timestamp
2621 {
2622 return;
2623 }
2624
2625 self.remote_selections.insert(
2626 lamport_timestamp.replica_id,
2627 SelectionSet {
2628 selections,
2629 lamport_timestamp,
2630 line_mode,
2631 cursor_shape,
2632 },
2633 );
2634 self.text.lamport_clock.observe(lamport_timestamp);
2635 self.non_text_state_update_count += 1;
2636 }
2637 Operation::UpdateCompletionTriggers {
2638 triggers,
2639 lamport_timestamp,
2640 server_id,
2641 } => {
2642 if triggers.is_empty() {
2643 self.completion_triggers_per_language_server
2644 .remove(&server_id);
2645 self.completion_triggers = self
2646 .completion_triggers_per_language_server
2647 .values()
2648 .flat_map(|triggers| triggers.iter().cloned())
2649 .collect();
2650 } else {
2651 self.completion_triggers_per_language_server
2652 .insert(server_id, triggers.iter().cloned().collect());
2653 self.completion_triggers.extend(triggers);
2654 }
2655 self.text.lamport_clock.observe(lamport_timestamp);
2656 }
2657 Operation::UpdateLineEnding {
2658 line_ending,
2659 lamport_timestamp,
2660 } => {
2661 self.text.set_line_ending(line_ending);
2662 self.text.lamport_clock.observe(lamport_timestamp);
2663 }
2664 }
2665 }
2666
2667 fn apply_diagnostic_update(
2668 &mut self,
2669 server_id: LanguageServerId,
2670 diagnostics: DiagnosticSet,
2671 lamport_timestamp: clock::Lamport,
2672 cx: &mut Context<Self>,
2673 ) {
2674 if lamport_timestamp > self.diagnostics_timestamp {
2675 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2676 if diagnostics.is_empty() {
2677 if let Ok(ix) = ix {
2678 self.diagnostics.remove(ix);
2679 }
2680 } else {
2681 match ix {
2682 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2683 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2684 };
2685 }
2686 self.diagnostics_timestamp = lamport_timestamp;
2687 self.non_text_state_update_count += 1;
2688 self.text.lamport_clock.observe(lamport_timestamp);
2689 cx.notify();
2690 cx.emit(BufferEvent::DiagnosticsUpdated);
2691 }
2692 }
2693
2694 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2695 self.was_changed();
2696 cx.emit(BufferEvent::Operation {
2697 operation,
2698 is_local,
2699 });
2700 }
2701
2702 /// Removes the selections for a given peer.
2703 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2704 self.remote_selections.remove(&replica_id);
2705 cx.notify();
2706 }
2707
2708 /// Undoes the most recent transaction.
2709 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2710 let was_dirty = self.is_dirty();
2711 let old_version = self.version.clone();
2712
2713 if let Some((transaction_id, operation)) = self.text.undo() {
2714 self.send_operation(Operation::Buffer(operation), true, cx);
2715 self.did_edit(&old_version, was_dirty, cx);
2716 Some(transaction_id)
2717 } else {
2718 None
2719 }
2720 }
2721
2722 /// Manually undoes a specific transaction in the buffer's undo history.
2723 pub fn undo_transaction(
2724 &mut self,
2725 transaction_id: TransactionId,
2726 cx: &mut Context<Self>,
2727 ) -> bool {
2728 let was_dirty = self.is_dirty();
2729 let old_version = self.version.clone();
2730 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2731 self.send_operation(Operation::Buffer(operation), true, cx);
2732 self.did_edit(&old_version, was_dirty, cx);
2733 true
2734 } else {
2735 false
2736 }
2737 }
2738
2739 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2740 pub fn undo_to_transaction(
2741 &mut self,
2742 transaction_id: TransactionId,
2743 cx: &mut Context<Self>,
2744 ) -> bool {
2745 let was_dirty = self.is_dirty();
2746 let old_version = self.version.clone();
2747
2748 let operations = self.text.undo_to_transaction(transaction_id);
2749 let undone = !operations.is_empty();
2750 for operation in operations {
2751 self.send_operation(Operation::Buffer(operation), true, cx);
2752 }
2753 if undone {
2754 self.did_edit(&old_version, was_dirty, cx)
2755 }
2756 undone
2757 }
2758
2759 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2760 let was_dirty = self.is_dirty();
2761 let operation = self.text.undo_operations(counts);
2762 let old_version = self.version.clone();
2763 self.send_operation(Operation::Buffer(operation), true, cx);
2764 self.did_edit(&old_version, was_dirty, cx);
2765 }
2766
    /// Redoes the most recently undone transaction.
2768 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2769 let was_dirty = self.is_dirty();
2770 let old_version = self.version.clone();
2771
2772 if let Some((transaction_id, operation)) = self.text.redo() {
2773 self.send_operation(Operation::Buffer(operation), true, cx);
2774 self.did_edit(&old_version, was_dirty, cx);
2775 Some(transaction_id)
2776 } else {
2777 None
2778 }
2779 }
2780
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2782 pub fn redo_to_transaction(
2783 &mut self,
2784 transaction_id: TransactionId,
2785 cx: &mut Context<Self>,
2786 ) -> bool {
2787 let was_dirty = self.is_dirty();
2788 let old_version = self.version.clone();
2789
2790 let operations = self.text.redo_to_transaction(transaction_id);
2791 let redone = !operations.is_empty();
2792 for operation in operations {
2793 self.send_operation(Operation::Buffer(operation), true, cx);
2794 }
2795 if redone {
2796 self.did_edit(&old_version, was_dirty, cx)
2797 }
2798 redone
2799 }
2800
2801 /// Override current completion triggers with the user-provided completion triggers.
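    ///
    /// A minimal sketch (assumes a known `server_id` and `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```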
2802 pub fn set_completion_triggers(
2803 &mut self,
2804 server_id: LanguageServerId,
2805 triggers: BTreeSet<String>,
2806 cx: &mut Context<Self>,
2807 ) {
2808 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2809 if triggers.is_empty() {
2810 self.completion_triggers_per_language_server
2811 .remove(&server_id);
2812 self.completion_triggers = self
2813 .completion_triggers_per_language_server
2814 .values()
2815 .flat_map(|triggers| triggers.iter().cloned())
2816 .collect();
2817 } else {
2818 self.completion_triggers_per_language_server
2819 .insert(server_id, triggers.clone());
2820 self.completion_triggers.extend(triggers.iter().cloned());
2821 }
2822 self.send_operation(
2823 Operation::UpdateCompletionTriggers {
2824 triggers: triggers.into_iter().collect(),
2825 lamport_timestamp: self.completion_triggers_timestamp,
2826 server_id,
2827 },
2828 true,
2829 cx,
2830 );
2831 cx.notify();
2832 }
2833
2834 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2836 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2837 &self.completion_triggers
2838 }
2839
2840 /// Call this directly after performing edits to prevent the preview tab
2841 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2842 /// to return false until there are additional edits.
2843 pub fn refresh_preview(&mut self) {
2844 self.preview_version = self.version.clone();
2845 }
2846
2847 /// Whether we should preserve the preview status of a tab containing this buffer.
2848 pub fn preserve_preview(&self) -> bool {
2849 !self.has_edits_since(&self.preview_version)
2850 }
2851}
2852
2853#[doc(hidden)]
2854#[cfg(any(test, feature = "test-support"))]
2855impl Buffer {
2856 pub fn edit_via_marked_text(
2857 &mut self,
2858 marked_string: &str,
2859 autoindent_mode: Option<AutoindentMode>,
2860 cx: &mut Context<Self>,
2861 ) {
2862 let edits = self.edits_for_marked_text(marked_string);
2863 self.edit(edits, autoindent_mode, cx);
2864 }
2865
2866 pub fn set_group_interval(&mut self, group_interval: Duration) {
2867 self.text.set_group_interval(group_interval);
2868 }
2869
2870 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2871 where
2872 T: rand::Rng,
2873 {
2874 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2875 let mut last_end = None;
2876 for _ in 0..old_range_count {
2877 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2878 break;
2879 }
2880
2881 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2882 let mut range = self.random_byte_range(new_start, rng);
2883 if rng.random_bool(0.2) {
2884 mem::swap(&mut range.start, &mut range.end);
2885 }
2886 last_end = Some(range.end);
2887
2888 let new_text_len = rng.random_range(0..10);
2889 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2890 new_text = new_text.to_uppercase();
2891
2892 edits.push((range, new_text));
2893 }
2894 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2895 self.edit(edits, None, cx);
2896 }
2897
2898 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2899 let was_dirty = self.is_dirty();
2900 let old_version = self.version.clone();
2901
2902 let ops = self.text.randomly_undo_redo(rng);
2903 if !ops.is_empty() {
2904 for op in ops {
2905 self.send_operation(Operation::Buffer(op), true, cx);
2906 self.did_edit(&old_version, was_dirty, cx);
2907 }
2908 }
2909 }
2910}
2911
2912impl EventEmitter<BufferEvent> for Buffer {}
2913
2914impl Deref for Buffer {
2915 type Target = TextBuffer;
2916
2917 fn deref(&self) -> &Self::Target {
2918 &self.text
2919 }
2920}
2921
2922impl BufferSnapshot {
    /// Returns the [`IndentSize`] for the given line, i.e. the amount of leading
    /// whitespace (tabs or spaces) that the line currently begins with.
2925 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2926 indent_size_for_line(self, row)
2927 }
2928
2929 /// Returns [`IndentSize`] for a given position that respects user settings
2930 /// and language preferences.
2931 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2932 let settings = language_settings(
2933 self.language_at(position).map(|l| l.name()),
2934 self.file(),
2935 cx,
2936 );
2937 if settings.hard_tabs {
2938 IndentSize::tab()
2939 } else {
2940 IndentSize::spaces(settings.tab_size.get())
2941 }
2942 }
2943
2944 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2945 /// is passed in as `single_indent_size`.
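    ///
    /// A minimal sketch (assumes a `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row} should be indented by {} columns", indent.len);
    /// }
    /// ```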
2946 pub fn suggested_indents(
2947 &self,
2948 rows: impl Iterator<Item = u32>,
2949 single_indent_size: IndentSize,
2950 ) -> BTreeMap<u32, IndentSize> {
2951 let mut result = BTreeMap::new();
2952
2953 for row_range in contiguous_ranges(rows, 10) {
2954 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2955 Some(suggestions) => suggestions,
2956 _ => break,
2957 };
2958
2959 for (row, suggestion) in row_range.zip(suggestions) {
2960 let indent_size = if let Some(suggestion) = suggestion {
2961 result
2962 .get(&suggestion.basis_row)
2963 .copied()
2964 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2965 .with_delta(suggestion.delta, single_indent_size)
2966 } else {
2967 self.indent_size_for_line(row)
2968 };
2969
2970 result.insert(row, indent_size);
2971 }
2972 }
2973
2974 result
2975 }
2976
2977 fn suggest_autoindents(
2978 &self,
2979 row_range: Range<u32>,
2980 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2981 let config = &self.language.as_ref()?.config;
2982 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2983
2984 #[derive(Debug, Clone)]
2985 struct StartPosition {
2986 start: Point,
2987 suffix: SharedString,
2988 }
2989
2990 // Find the suggested indentation ranges based on the syntax tree.
2991 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2992 let end = Point::new(row_range.end, 0);
2993 let range = (start..end).to_offset(&self.text);
2994 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2995 Some(&grammar.indents_config.as_ref()?.query)
2996 });
2997 let indent_configs = matches
2998 .grammars()
2999 .iter()
3000 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3001 .collect::<Vec<_>>();
3002
3003 let mut indent_ranges = Vec::<Range<Point>>::new();
3004 let mut start_positions = Vec::<StartPosition>::new();
3005 let mut outdent_positions = Vec::<Point>::new();
3006 while let Some(mat) = matches.peek() {
3007 let mut start: Option<Point> = None;
3008 let mut end: Option<Point> = None;
3009
3010 let config = indent_configs[mat.grammar_index];
3011 for capture in mat.captures {
3012 if capture.index == config.indent_capture_ix {
3013 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3014 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3015 } else if Some(capture.index) == config.start_capture_ix {
3016 start = Some(Point::from_ts_point(capture.node.end_position()));
3017 } else if Some(capture.index) == config.end_capture_ix {
3018 end = Some(Point::from_ts_point(capture.node.start_position()));
3019 } else if Some(capture.index) == config.outdent_capture_ix {
3020 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3021 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3022 start_positions.push(StartPosition {
3023 start: Point::from_ts_point(capture.node.start_position()),
3024 suffix: suffix.clone(),
3025 });
3026 }
3027 }
3028
3029 matches.advance();
3030 if let Some((start, end)) = start.zip(end) {
3031 if start.row == end.row {
3032 continue;
3033 }
3034 let range = start..end;
3035 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3036 Err(ix) => indent_ranges.insert(ix, range),
3037 Ok(ix) => {
3038 let prev_range = &mut indent_ranges[ix];
3039 prev_range.end = prev_range.end.max(range.end);
3040 }
3041 }
3042 }
3043 }
3044
3045 let mut error_ranges = Vec::<Range<Point>>::new();
3046 let mut matches = self
3047 .syntax
3048 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3049 while let Some(mat) = matches.peek() {
3050 let node = mat.captures[0].node;
3051 let start = Point::from_ts_point(node.start_position());
3052 let end = Point::from_ts_point(node.end_position());
3053 let range = start..end;
3054 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3055 Ok(ix) | Err(ix) => ix,
3056 };
3057 let mut end_ix = ix;
3058 while let Some(existing_range) = error_ranges.get(end_ix) {
3059 if existing_range.end < end {
3060 end_ix += 1;
3061 } else {
3062 break;
3063 }
3064 }
3065 error_ranges.splice(ix..end_ix, [range]);
3066 matches.advance();
3067 }
3068
3069 outdent_positions.sort();
3070 for outdent_position in outdent_positions {
3071 // find the innermost indent range containing this outdent_position
3072 // set its end to the outdent position
3073 if let Some(range_to_truncate) = indent_ranges
3074 .iter_mut()
3075 .filter(|indent_range| indent_range.contains(&outdent_position))
3076 .next_back()
3077 {
3078 range_to_truncate.end = outdent_position;
3079 }
3080 }
3081
3082 start_positions.sort_by_key(|b| b.start);
3083
        // Find the suggested indentation increases and decreases based on regexes.
3085 let mut regex_outdent_map = HashMap::default();
3086 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3087 let mut start_positions_iter = start_positions.iter().peekable();
3088
3089 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3090 self.for_each_line(
3091 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3092 ..Point::new(row_range.end, 0),
3093 |row, line| {
3094 if config
3095 .decrease_indent_pattern
3096 .as_ref()
3097 .is_some_and(|regex| regex.is_match(line))
3098 {
3099 indent_change_rows.push((row, Ordering::Less));
3100 }
3101 if config
3102 .increase_indent_pattern
3103 .as_ref()
3104 .is_some_and(|regex| regex.is_match(line))
3105 {
3106 indent_change_rows.push((row + 1, Ordering::Greater));
3107 }
3108 while let Some(pos) = start_positions_iter.peek() {
3109 if pos.start.row < row {
3110 let pos = start_positions_iter.next().unwrap();
3111 last_seen_suffix
3112 .entry(pos.suffix.to_string())
3113 .or_default()
3114 .push(pos.start);
3115 } else {
3116 break;
3117 }
3118 }
3119 for rule in &config.decrease_indent_patterns {
3120 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3121 let row_start_column = self.indent_size_for_line(row).len;
3122 let basis_row = rule
3123 .valid_after
3124 .iter()
3125 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3126 .flatten()
3127 .filter(|start_point| start_point.column <= row_start_column)
3128 .max_by_key(|start_point| start_point.row);
3129 if let Some(outdent_to_row) = basis_row {
3130 regex_outdent_map.insert(row, outdent_to_row.row);
3131 }
3132 break;
3133 }
3134 }
3135 },
3136 );
3137
3138 let mut indent_changes = indent_change_rows.into_iter().peekable();
3139 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3140 prev_non_blank_row.unwrap_or(0)
3141 } else {
3142 row_range.start.saturating_sub(1)
3143 };
3144
3145 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3146 Some(row_range.map(move |row| {
3147 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3148
3149 let mut indent_from_prev_row = false;
3150 let mut outdent_from_prev_row = false;
3151 let mut outdent_to_row = u32::MAX;
3152 let mut from_regex = false;
3153
3154 while let Some((indent_row, delta)) = indent_changes.peek() {
3155 match indent_row.cmp(&row) {
3156 Ordering::Equal => match delta {
3157 Ordering::Less => {
3158 from_regex = true;
3159 outdent_from_prev_row = true
3160 }
3161 Ordering::Greater => {
3162 indent_from_prev_row = true;
3163 from_regex = true
3164 }
3165 _ => {}
3166 },
3167
3168 Ordering::Greater => break,
3169 Ordering::Less => {}
3170 }
3171
3172 indent_changes.next();
3173 }
3174
3175 for range in &indent_ranges {
3176 if range.start.row >= row {
3177 break;
3178 }
3179 if range.start.row == prev_row && range.end > row_start {
3180 indent_from_prev_row = true;
3181 }
3182 if range.end > prev_row_start && range.end <= row_start {
3183 outdent_to_row = outdent_to_row.min(range.start.row);
3184 }
3185 }
3186
3187 if let Some(basis_row) = regex_outdent_map.get(&row) {
3188 indent_from_prev_row = false;
3189 outdent_to_row = *basis_row;
3190 from_regex = true;
3191 }
3192
3193 let within_error = error_ranges
3194 .iter()
3195 .any(|e| e.start.row < row && e.end > row_start);
3196
3197 let suggestion = if outdent_to_row == prev_row
3198 || (outdent_from_prev_row && indent_from_prev_row)
3199 {
3200 Some(IndentSuggestion {
3201 basis_row: prev_row,
3202 delta: Ordering::Equal,
3203 within_error: within_error && !from_regex,
3204 })
3205 } else if indent_from_prev_row {
3206 Some(IndentSuggestion {
3207 basis_row: prev_row,
3208 delta: Ordering::Greater,
3209 within_error: within_error && !from_regex,
3210 })
3211 } else if outdent_to_row < prev_row {
3212 Some(IndentSuggestion {
3213 basis_row: outdent_to_row,
3214 delta: Ordering::Equal,
3215 within_error: within_error && !from_regex,
3216 })
3217 } else if outdent_from_prev_row {
3218 Some(IndentSuggestion {
3219 basis_row: prev_row,
3220 delta: Ordering::Less,
3221 within_error: within_error && !from_regex,
3222 })
3223 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3224 {
3225 Some(IndentSuggestion {
3226 basis_row: prev_row,
3227 delta: Ordering::Equal,
3228 within_error: within_error && !from_regex,
3229 })
3230 } else {
3231 None
3232 };
3233
3234 prev_row = row;
3235 prev_row_start = row_start;
3236 suggestion
3237 }))
3238 }
3239
3240 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3241 while row > 0 {
3242 row -= 1;
3243 if !self.is_line_blank(row) {
3244 return Some(row);
3245 }
3246 }
3247 None
3248 }
3249
3250 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3251 let captures = self.syntax.captures(range, &self.text, |grammar| {
3252 grammar
3253 .highlights_config
3254 .as_ref()
3255 .map(|config| &config.query)
3256 });
3257 let highlight_maps = captures
3258 .grammars()
3259 .iter()
3260 .map(|grammar| grammar.highlight_map())
3261 .collect();
3262 (captures, highlight_maps)
3263 }
3264
3265 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3266 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3267 /// returned in chunks where each chunk has a single syntax highlighting style and
3268 /// diagnostic status.
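    ///
    /// A minimal sketch of reassembling the text from its chunks (assumes a
    /// `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```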
3269 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3270 let range = range.start.to_offset(self)..range.end.to_offset(self);
3271
3272 let mut syntax = None;
3273 if language_aware {
3274 syntax = Some(self.get_highlights(range.clone()));
3275 }
3276 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3277 let diagnostics = language_aware;
3278 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3279 }
3280
3281 pub fn highlighted_text_for_range<T: ToOffset>(
3282 &self,
3283 range: Range<T>,
3284 override_style: Option<HighlightStyle>,
3285 syntax_theme: &SyntaxTheme,
3286 ) -> HighlightedText {
3287 HighlightedText::from_buffer_range(
3288 range,
3289 &self.text,
3290 &self.syntax,
3291 override_style,
3292 syntax_theme,
3293 )
3294 }
3295
3296 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3298 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3299 let mut line = String::new();
3300 let mut row = range.start.row;
3301 for chunk in self
3302 .as_rope()
3303 .chunks_in_range(range.to_offset(self))
3304 .chain(["\n"])
3305 {
3306 for (newline_ix, text) in chunk.split('\n').enumerate() {
3307 if newline_ix > 0 {
3308 callback(row, &line);
3309 row += 1;
3310 line.clear();
3311 }
3312 line.push_str(text);
3313 }
3314 }
3315 }
3316
3317 /// Iterates over every [`SyntaxLayer`] in the buffer.
3318 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3319 self.syntax_layers_for_range(0..self.len(), true)
3320 }
3321
3322 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3323 let offset = position.to_offset(self);
3324 self.syntax_layers_for_range(offset..offset, false)
3325 .filter(|l| l.node().end_byte() > offset)
3326 .last()
3327 }
3328
3329 pub fn syntax_layers_for_range<D: ToOffset>(
3330 &self,
3331 range: Range<D>,
3332 include_hidden: bool,
3333 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3334 self.syntax
3335 .layers_for_range(range, &self.text, include_hidden)
3336 }
3337
3338 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3339 &self,
3340 range: Range<D>,
3341 ) -> Option<SyntaxLayer<'_>> {
3342 let range = range.to_offset(self);
3343 self.syntax
3344 .layers_for_range(range, &self.text, false)
3345 .max_by(|a, b| {
3346 if a.depth != b.depth {
3347 a.depth.cmp(&b.depth)
3348 } else if a.offset.0 != b.offset.0 {
3349 a.offset.0.cmp(&b.offset.0)
3350 } else {
3351 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3352 }
3353 })
3354 }
3355
3356 /// Returns the main [`Language`].
3357 pub fn language(&self) -> Option<&Arc<Language>> {
3358 self.language.as_ref()
3359 }
3360
3361 /// Returns the [`Language`] at the given location.
3362 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3363 self.syntax_layer_at(position)
3364 .map(|info| info.language)
3365 .or(self.language.as_ref())
3366 }
3367
3368 /// Returns the settings for the language at the given location.
3369 pub fn settings_at<'a, D: ToOffset>(
3370 &'a self,
3371 position: D,
3372 cx: &'a App,
3373 ) -> Cow<'a, LanguageSettings> {
3374 language_settings(
3375 self.language_at(position).map(|l| l.name()),
3376 self.file.as_ref(),
3377 cx,
3378 )
3379 }
3380
3381 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3382 CharClassifier::new(self.language_scope_at(point))
3383 }
3384
3385 /// Returns the [`LanguageScope`] at the given location.
3386 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3387 let offset = position.to_offset(self);
3388 let mut scope = None;
3389 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3390
3391 // Use the layer that has the smallest node intersecting the given point.
3392 for layer in self
3393 .syntax
3394 .layers_for_range(offset..offset, &self.text, false)
3395 {
3396 let mut cursor = layer.node().walk();
3397
3398 let mut range = None;
3399 loop {
3400 let child_range = cursor.node().byte_range();
3401 if !child_range.contains(&offset) {
3402 break;
3403 }
3404
3405 range = Some(child_range);
3406 if cursor.goto_first_child_for_byte(offset).is_none() {
3407 break;
3408 }
3409 }
3410
3411 if let Some(range) = range
3412 && smallest_range_and_depth.as_ref().is_none_or(
3413 |(smallest_range, smallest_range_depth)| {
3414 if layer.depth > *smallest_range_depth {
3415 true
3416 } else if layer.depth == *smallest_range_depth {
3417 range.len() < smallest_range.len()
3418 } else {
3419 false
3420 }
3421 },
3422 )
3423 {
3424 smallest_range_and_depth = Some((range, layer.depth));
3425 scope = Some(LanguageScope {
3426 language: layer.language.clone(),
3427 override_id: layer.override_id(offset, &self.text),
3428 });
3429 }
3430 }
3431
3432 scope.or_else(|| {
3433 self.language.clone().map(|language| LanguageScope {
3434 language,
3435 override_id: None,
3436 })
3437 })
3438 }
3439
3440 /// Returns a tuple of the range and character kind of the word
3441 /// surrounding the given position.
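    ///
    /// A minimal sketch (assumes a `snapshot: BufferSnapshot` containing `"let foo = 1;"`):
    ///
    /// ```ignore
    /// // With the cursor inside "foo", the surrounding word is the identifier itself.
    /// let (range, kind) = snapshot.surrounding_word(5, false);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// assert_eq!(word, "foo");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```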
3442 pub fn surrounding_word<T: ToOffset>(
3443 &self,
3444 start: T,
3445 for_completion: bool,
3446 ) -> (Range<usize>, Option<CharKind>) {
3447 let mut start = start.to_offset(self);
3448 let mut end = start;
3449 let mut next_chars = self.chars_at(start).take(128).peekable();
3450 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3451
3452 let classifier = self
3453 .char_classifier_at(start)
3454 .for_completion(for_completion);
3455 let word_kind = cmp::max(
3456 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3457 next_chars.peek().copied().map(|c| classifier.kind(c)),
3458 );
3459
3460 for ch in prev_chars {
3461 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3462 start -= ch.len_utf8();
3463 } else {
3464 break;
3465 }
3466 }
3467
3468 for ch in next_chars {
3469 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3470 end += ch.len_utf8();
3471 } else {
3472 break;
3473 }
3474 }
3475
3476 (start..end, word_kind)
3477 }
3478
3479 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3480 /// range. When `require_larger` is true, the node found must be larger than the query range.
3481 ///
3482 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3483 /// be moved to the root of the tree.
3484 fn goto_node_enclosing_range(
3485 cursor: &mut tree_sitter::TreeCursor,
3486 query_range: &Range<usize>,
3487 require_larger: bool,
3488 ) -> bool {
3489 let mut ascending = false;
3490 loop {
3491 let mut range = cursor.node().byte_range();
3492 if query_range.is_empty() {
3493 // When the query range is empty and the current node starts after it, move to the
3494                // previous sibling to find the containing node.
3495 if range.start > query_range.start {
3496 cursor.goto_previous_sibling();
3497 range = cursor.node().byte_range();
3498 }
3499 } else {
3500 // When the query range is non-empty and the current node ends exactly at the start,
3501 // move to the next sibling to find a node that extends beyond the start.
3502 if range.end == query_range.start {
3503 cursor.goto_next_sibling();
3504 range = cursor.node().byte_range();
3505 }
3506 }
3507
3508 let encloses = range.contains_inclusive(query_range)
3509 && (!require_larger || range.len() > query_range.len());
3510 if !encloses {
3511 ascending = true;
3512 if !cursor.goto_parent() {
3513 return false;
3514 }
3515 continue;
3516 } else if ascending {
3517 return true;
3518 }
3519
3520 // Descend into the current node.
3521 if cursor
3522 .goto_first_child_for_byte(query_range.start)
3523 .is_none()
3524 {
3525 return true;
3526 }
3527 }
3528 }
3529
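    /// Returns the smallest syntax node that contains the given range and is
    /// strictly larger than it, considering all syntax layers that overlap the
    /// range.
    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`
    /// and a byte range `selection` inside some syntactic construct:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     // `node` is a tree-sitter node whose kind and byte range describe the
    ///     // enclosing construct, e.g. a block or a function item.
    ///     let _ = (node.kind(), node.byte_range());
    /// }
    /// ```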
3530 pub fn syntax_ancestor<'a, T: ToOffset>(
3531 &'a self,
3532 range: Range<T>,
3533 ) -> Option<tree_sitter::Node<'a>> {
3534 let range = range.start.to_offset(self)..range.end.to_offset(self);
3535 let mut result: Option<tree_sitter::Node<'a>> = None;
3536 for layer in self
3537 .syntax
3538 .layers_for_range(range.clone(), &self.text, true)
3539 {
3540 let mut cursor = layer.node().walk();
3541
3542 // Find the node that both contains the range and is larger than it.
3543 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3544 continue;
3545 }
3546
3547 let left_node = cursor.node();
3548 let mut layer_result = left_node;
3549
3550 // For an empty range, try to find another node immediately to the right of the range.
3551 if left_node.end_byte() == range.start {
3552 let mut right_node = None;
3553 while !cursor.goto_next_sibling() {
3554 if !cursor.goto_parent() {
3555 break;
3556 }
3557 }
3558
3559 while cursor.node().start_byte() == range.start {
3560 right_node = Some(cursor.node());
3561 if !cursor.goto_first_child() {
3562 break;
3563 }
3564 }
3565
3566 // If there is a candidate node on both sides of the (empty) range, then
3567 // decide between the two by favoring a named node over an anonymous token.
3568 // If both nodes are the same in that regard, favor the right one.
3569 if let Some(right_node) = right_node
3570 && (right_node.is_named() || !left_node.is_named())
3571 {
3572 layer_result = right_node;
3573 }
3574 }
3575
3576 if let Some(previous_result) = &result
3577 && previous_result.byte_range().len() < layer_result.byte_range().len()
3578 {
3579 continue;
3580 }
3581 result = Some(layer_result);
3582 }
3583
3584 result
3585 }
3586
3587 /// Find the previous sibling syntax node at the given range.
3588 ///
3589 /// This function locates the syntax node that precedes the node containing
3590 /// the given range. It searches hierarchically by:
3591 /// 1. Finding the node that contains the given range
3592 /// 2. Looking for the previous sibling at the same tree level
3593 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3594 ///
3595 /// Returns `None` if there is no previous sibling at any ancestor level.
3596 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3597 &'a self,
3598 range: Range<T>,
3599 ) -> Option<tree_sitter::Node<'a>> {
3600 let range = range.start.to_offset(self)..range.end.to_offset(self);
3601 let mut result: Option<tree_sitter::Node<'a>> = None;
3602
3603 for layer in self
3604 .syntax
3605 .layers_for_range(range.clone(), &self.text, true)
3606 {
3607 let mut cursor = layer.node().walk();
3608
3609 // Find the node that contains the range
3610 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3611 continue;
3612 }
3613
3614 // Look for the previous sibling, moving up ancestor levels if needed
3615 loop {
3616 if cursor.goto_previous_sibling() {
3617 let layer_result = cursor.node();
3618
3619 if let Some(previous_result) = &result {
3620 if previous_result.byte_range().end < layer_result.byte_range().end {
3621 continue;
3622 }
3623 }
3624 result = Some(layer_result);
3625 break;
3626 }
3627
3628 // No sibling found at this level, try moving up to parent
3629 if !cursor.goto_parent() {
3630 break;
3631 }
3632 }
3633 }
3634
3635 result
3636 }
3637
3638 /// Find the next sibling syntax node at the given range.
3639 ///
3640 /// This function locates the syntax node that follows the node containing
3641 /// the given range. It searches hierarchically by:
3642 /// 1. Finding the node that contains the given range
3643 /// 2. Looking for the next sibling at the same tree level
3644 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3645 ///
3646 /// Returns `None` if there is no next sibling at any ancestor level.
3647 pub fn syntax_next_sibling<'a, T: ToOffset>(
3648 &'a self,
3649 range: Range<T>,
3650 ) -> Option<tree_sitter::Node<'a>> {
3651 let range = range.start.to_offset(self)..range.end.to_offset(self);
3652 let mut result: Option<tree_sitter::Node<'a>> = None;
3653
3654 for layer in self
3655 .syntax
3656 .layers_for_range(range.clone(), &self.text, true)
3657 {
3658 let mut cursor = layer.node().walk();
3659
3660 // Find the node that contains the range
3661 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3662 continue;
3663 }
3664
3665 // Look for the next sibling, moving up ancestor levels if needed
3666 loop {
3667 if cursor.goto_next_sibling() {
3668 let layer_result = cursor.node();
3669
3670 if let Some(previous_result) = &result {
3671 if previous_result.byte_range().start > layer_result.byte_range().start {
3672 continue;
3673 }
3674 }
3675 result = Some(layer_result);
3676 break;
3677 }
3678
3679 // No sibling found at this level, try moving up to parent
3680 if !cursor.goto_parent() {
3681 break;
3682 }
3683 }
3684 }
3685
3686 result
3687 }
3688
3689 /// Returns the root syntax node within the given row
3690 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3691 let start_offset = position.to_offset(self);
3692
3693 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3694
3695 let layer = self
3696 .syntax
3697 .layers_for_range(start_offset..start_offset, &self.text, true)
3698 .next()?;
3699
3700 let mut cursor = layer.node().walk();
3701
3702 // Descend to the first leaf that touches the start of the range.
3703 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3704 if cursor.node().end_byte() == start_offset {
3705 cursor.goto_next_sibling();
3706 }
3707 }
3708
3709 // Ascend to the root node within the same row.
3710 while cursor.goto_parent() {
3711 if cursor.node().start_position().row != row {
3712 break;
3713 }
3714 }
3715
3716 Some(cursor.node())
3717 }
3718
3719 /// Returns the outline for the buffer.
3720 ///
3721 /// This method allows passing an optional [`SyntaxTheme`] to
3722 /// syntax-highlight the returned symbols.
3723 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3724 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3725 }
3726
3727 /// Returns all the symbols that contain the given position.
3728 ///
3729 /// This method allows passing an optional [`SyntaxTheme`] to
3730 /// syntax-highlight the returned symbols.
3731 pub fn symbols_containing<T: ToOffset>(
3732 &self,
3733 position: T,
3734 theme: Option<&SyntaxTheme>,
3735 ) -> Vec<OutlineItem<Anchor>> {
3736 let position = position.to_offset(self);
3737 let mut items = self.outline_items_containing(
3738 position.saturating_sub(1)..self.len().min(position + 1),
3739 false,
3740 theme,
3741 );
3742 let mut prev_depth = None;
3743 items.retain(|item| {
3744 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3745 prev_depth = Some(item.depth);
3746 result
3747 });
3748 items
3749 }
3750
3751 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3752 let range = range.to_offset(self);
3753 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3754 grammar.outline_config.as_ref().map(|c| &c.query)
3755 });
3756 let configs = matches
3757 .grammars()
3758 .iter()
3759 .map(|g| g.outline_config.as_ref().unwrap())
3760 .collect::<Vec<_>>();
3761
3762 while let Some(mat) = matches.peek() {
3763 let config = &configs[mat.grammar_index];
3764 let containing_item_node = maybe!({
3765 let item_node = mat.captures.iter().find_map(|cap| {
3766 if cap.index == config.item_capture_ix {
3767 Some(cap.node)
3768 } else {
3769 None
3770 }
3771 })?;
3772
3773 let item_byte_range = item_node.byte_range();
3774 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3775 None
3776 } else {
3777 Some(item_node)
3778 }
3779 });
3780
3781 if let Some(item_node) = containing_item_node {
3782 return Some(
3783 Point::from_ts_point(item_node.start_position())
3784 ..Point::from_ts_point(item_node.end_position()),
3785 );
3786 }
3787
3788 matches.advance();
3789 }
3790 None
3791 }
3792
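    /// Returns the outline items whose ranges intersect the given range, with
    /// depths computed from their containment relationships.
    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```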
3793 pub fn outline_items_containing<T: ToOffset>(
3794 &self,
3795 range: Range<T>,
3796 include_extra_context: bool,
3797 theme: Option<&SyntaxTheme>,
3798 ) -> Vec<OutlineItem<Anchor>> {
3799 let range = range.to_offset(self);
3800 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3801 grammar.outline_config.as_ref().map(|c| &c.query)
3802 });
3803
3804 let mut items = Vec::new();
3805 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3806 while let Some(mat) = matches.peek() {
3807 let config = matches.grammars()[mat.grammar_index]
3808 .outline_config
3809 .as_ref()
3810 .unwrap();
3811 if let Some(item) =
3812 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3813 {
3814 items.push(item);
3815 } else if let Some(capture) = mat
3816 .captures
3817 .iter()
3818 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3819 {
3820 let capture_range = capture.node.start_position()..capture.node.end_position();
3821 let mut capture_row_range =
3822 capture_range.start.row as u32..capture_range.end.row as u32;
3823 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3824 {
3825 capture_row_range.end -= 1;
3826 }
3827 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3828 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3829 last_row_range.end = capture_row_range.end;
3830 } else {
3831 annotation_row_ranges.push(capture_row_range);
3832 }
3833 } else {
3834 annotation_row_ranges.push(capture_row_range);
3835 }
3836 }
3837 matches.advance();
3838 }
3839
3840 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3841
3842 // Assign depths based on containment relationships and convert to anchors.
3843 let mut item_ends_stack = Vec::<Point>::new();
3844 let mut anchor_items = Vec::new();
3845 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3846 for item in items {
3847 while let Some(last_end) = item_ends_stack.last().copied() {
3848 if last_end < item.range.end {
3849 item_ends_stack.pop();
3850 } else {
3851 break;
3852 }
3853 }
3854
3855 let mut annotation_row_range = None;
3856 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3857 let row_preceding_item = item.range.start.row.saturating_sub(1);
3858 if next_annotation_row_range.end < row_preceding_item {
3859 annotation_row_ranges.next();
3860 } else {
3861 if next_annotation_row_range.end == row_preceding_item {
3862 annotation_row_range = Some(next_annotation_row_range.clone());
3863 annotation_row_ranges.next();
3864 }
3865 break;
3866 }
3867 }
3868
3869 anchor_items.push(OutlineItem {
3870 depth: item_ends_stack.len(),
3871 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3872 text: item.text,
3873 highlight_ranges: item.highlight_ranges,
3874 name_ranges: item.name_ranges,
3875 signature_range: item
3876 .signature_range
3877 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3878 body_range: item
3879 .body_range
3880 .map(|r| self.anchor_after(r.start)..self.anchor_before(r.end)),
3881 annotation_range: annotation_row_range.map(|annotation_range| {
3882 self.anchor_after(Point::new(annotation_range.start, 0))
3883 ..self.anchor_before(Point::new(
3884 annotation_range.end,
3885 self.line_len(annotation_range.end),
3886 ))
3887 }),
3888 });
3889 item_ends_stack.push(item.range.end);
3890 }
3891
3892 anchor_items
3893 }
3894
3895 fn next_outline_item(
3896 &self,
3897 config: &OutlineConfig,
3898 mat: &SyntaxMapMatch,
3899 range: &Range<usize>,
3900 include_extra_context: bool,
3901 theme: Option<&SyntaxTheme>,
3902 ) -> Option<OutlineItem<Point>> {
3903 let item_node = mat.captures.iter().find_map(|cap| {
3904 if cap.index == config.item_capture_ix {
3905 Some(cap.node)
3906 } else {
3907 None
3908 }
3909 })?;
3910
3911 let item_byte_range = item_node.byte_range();
3912 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3913 return None;
3914 }
3915 let item_point_range = Point::from_ts_point(item_node.start_position())
3916 ..Point::from_ts_point(item_node.end_position());
3917
3918 let mut open_point = None;
3919 let mut close_point = None;
3920
3921 let mut signature_start = None;
3922 let mut signature_end = None;
3923 let mut extend_signature_range = |node: tree_sitter::Node| {
3924 if signature_start.is_none() {
3925 signature_start = Some(Point::from_ts_point(node.start_position()));
3926 }
3927 signature_end = Some(Point::from_ts_point(node.end_position()));
3928 };
3929
3930 let mut buffer_ranges = Vec::new();
3931 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3932 let mut range = node.start_byte()..node.end_byte();
3933 let start = node.start_position();
3934 if node.end_position().row > start.row {
3935 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3936 }
3937
3938 if !range.is_empty() {
3939 buffer_ranges.push((range, node_is_name));
3940 }
3941 };
3942
3943 for capture in mat.captures {
3944 if capture.index == config.name_capture_ix {
3945 add_to_buffer_ranges(capture.node, true);
3946 extend_signature_range(capture.node);
3947 } else if Some(capture.index) == config.context_capture_ix
3948 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3949 {
3950 add_to_buffer_ranges(capture.node, false);
3951 extend_signature_range(capture.node);
3952 } else {
3953 if Some(capture.index) == config.open_capture_ix {
3954 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3955 } else if Some(capture.index) == config.close_capture_ix {
3956 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3957 }
3958 }
3959 }
3960
3961 if buffer_ranges.is_empty() {
3962 return None;
3963 }
3964
3965 let mut text = String::new();
3966 let mut highlight_ranges = Vec::new();
3967 let mut name_ranges = Vec::new();
3968 let mut chunks = self.chunks(
3969 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3970 true,
3971 );
3972 let mut last_buffer_range_end = 0;
3973 for (buffer_range, is_name) in buffer_ranges {
3974 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3975 if space_added {
3976 text.push(' ');
3977 }
3978 let before_append_len = text.len();
3979 let mut offset = buffer_range.start;
3980 chunks.seek(buffer_range.clone());
3981 for mut chunk in chunks.by_ref() {
3982 if chunk.text.len() > buffer_range.end - offset {
3983 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3984 offset = buffer_range.end;
3985 } else {
3986 offset += chunk.text.len();
3987 }
3988 let style = chunk
3989 .syntax_highlight_id
3990 .zip(theme)
3991 .and_then(|(highlight, theme)| highlight.style(theme));
3992 if let Some(style) = style {
3993 let start = text.len();
3994 let end = start + chunk.text.len();
3995 highlight_ranges.push((start..end, style));
3996 }
3997 text.push_str(chunk.text);
3998 if offset >= buffer_range.end {
3999 break;
4000 }
4001 }
4002 if is_name {
4003 let after_append_len = text.len();
4004 let start = if space_added && !name_ranges.is_empty() {
4005 before_append_len - 1
4006 } else {
4007 before_append_len
4008 };
4009 name_ranges.push(start..after_append_len);
4010 }
4011 last_buffer_range_end = buffer_range.end;
4012 }
4013
4014 let signature_range = signature_start
4015 .zip(signature_end)
4016 .map(|(start, end)| start..end);
4017
4018 Some(OutlineItem {
4019 depth: 0, // We'll calculate the depth later
4020 range: item_point_range,
4021 text,
4022 highlight_ranges,
4023 name_ranges,
4024 signature_range,
4025 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4026 annotation_range: None,
4027 })
4028 }
4029
4030 pub fn function_body_fold_ranges<T: ToOffset>(
4031 &self,
4032 within: Range<T>,
4033 ) -> impl Iterator<Item = Range<usize>> + '_ {
4034 self.text_object_ranges(within, TreeSitterOptions::default())
4035 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4036 }
4037
4038 /// For each grammar in the language, runs the provided
4039 /// [`tree_sitter::Query`] against the given range.
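    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`
    /// whose grammars define a brackets query:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.brackets_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _ = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```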
4040 pub fn matches(
4041 &self,
4042 range: Range<usize>,
4043 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4044 ) -> SyntaxMapMatches<'_> {
4045 self.syntax.matches(range, self, query)
4046 }
4047
4048 pub fn all_bracket_ranges(
4049 &self,
4050 range: Range<usize>,
4051 ) -> impl Iterator<Item = BracketMatch> + '_ {
4052 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4053 grammar.brackets_config.as_ref().map(|c| &c.query)
4054 });
4055 let configs = matches
4056 .grammars()
4057 .iter()
4058 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4059 .collect::<Vec<_>>();
4060
4061 iter::from_fn(move || {
4062 while let Some(mat) = matches.peek() {
4063 let mut open = None;
4064 let mut close = None;
4065 let config = &configs[mat.grammar_index];
4066 let pattern = &config.patterns[mat.pattern_index];
4067 for capture in mat.captures {
4068 if capture.index == config.open_capture_ix {
4069 open = Some(capture.node.byte_range());
4070 } else if capture.index == config.close_capture_ix {
4071 close = Some(capture.node.byte_range());
4072 }
4073 }
4074
4075 matches.advance();
4076
4077 let Some((open_range, close_range)) = open.zip(close) else {
4078 continue;
4079 };
4080
4081 let bracket_range = open_range.start..=close_range.end;
4082 if !bracket_range.overlaps(&range) {
4083 continue;
4084 }
4085
4086 return Some(BracketMatch {
4087 open_range,
4088 close_range,
4089 newline_only: pattern.newline_only,
4090 });
4091 }
4092 None
4093 })
4094 }
4095
4096 /// Returns bracket range pairs overlapping or adjacent to `range`
4097 pub fn bracket_ranges<T: ToOffset>(
4098 &self,
4099 range: Range<T>,
4100 ) -> impl Iterator<Item = BracketMatch> + '_ {
4101 // Find bracket pairs that *inclusively* contain the given range.
4102 let range = range.start.to_offset(self).saturating_sub(1)
4103 ..self.len().min(range.end.to_offset(self) + 1);
4104 self.all_bracket_ranges(range)
4105 .filter(|pair| !pair.newline_only)
4106 }
4107
4108 pub fn debug_variables_query<T: ToOffset>(
4109 &self,
4110 range: Range<T>,
4111 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4112 let range = range.start.to_offset(self).saturating_sub(1)
4113 ..self.len().min(range.end.to_offset(self) + 1);
4114
4115 let mut matches = self.syntax.matches_with_options(
4116 range.clone(),
4117 &self.text,
4118 TreeSitterOptions::default(),
4119 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4120 );
4121
4122 let configs = matches
4123 .grammars()
4124 .iter()
4125 .map(|grammar| grammar.debug_variables_config.as_ref())
4126 .collect::<Vec<_>>();
4127
4128 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4129
4130 iter::from_fn(move || {
4131 loop {
4132 while let Some(capture) = captures.pop() {
4133 if capture.0.overlaps(&range) {
4134 return Some(capture);
4135 }
4136 }
4137
4138 let mat = matches.peek()?;
4139
4140 let Some(config) = configs[mat.grammar_index].as_ref() else {
4141 matches.advance();
4142 continue;
4143 };
4144
4145 for capture in mat.captures {
4146 let Some(ix) = config
4147 .objects_by_capture_ix
4148 .binary_search_by_key(&capture.index, |e| e.0)
4149 .ok()
4150 else {
4151 continue;
4152 };
4153 let text_object = config.objects_by_capture_ix[ix].1;
4154 let byte_range = capture.node.byte_range();
4155
4156 let mut found = false;
4157 for (range, existing) in captures.iter_mut() {
4158 if existing == &text_object {
4159 range.start = range.start.min(byte_range.start);
4160 range.end = range.end.max(byte_range.end);
4161 found = true;
4162 break;
4163 }
4164 }
4165
4166 if !found {
4167 captures.push((byte_range, text_object));
4168 }
4169 }
4170
4171 matches.advance();
4172 }
4173 })
4174 }
4175
4176 pub fn text_object_ranges<T: ToOffset>(
4177 &self,
4178 range: Range<T>,
4179 options: TreeSitterOptions,
4180 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4181 let range = range.start.to_offset(self).saturating_sub(1)
4182 ..self.len().min(range.end.to_offset(self) + 1);
4183
4184 let mut matches =
4185 self.syntax
4186 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4187 grammar.text_object_config.as_ref().map(|c| &c.query)
4188 });
4189
4190 let configs = matches
4191 .grammars()
4192 .iter()
4193 .map(|grammar| grammar.text_object_config.as_ref())
4194 .collect::<Vec<_>>();
4195
4196 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4197
4198 iter::from_fn(move || {
4199 loop {
4200 while let Some(capture) = captures.pop() {
4201 if capture.0.overlaps(&range) {
4202 return Some(capture);
4203 }
4204 }
4205
4206 let mat = matches.peek()?;
4207
4208 let Some(config) = configs[mat.grammar_index].as_ref() else {
4209 matches.advance();
4210 continue;
4211 };
4212
4213 for capture in mat.captures {
4214 let Some(ix) = config
4215 .text_objects_by_capture_ix
4216 .binary_search_by_key(&capture.index, |e| e.0)
4217 .ok()
4218 else {
4219 continue;
4220 };
4221 let text_object = config.text_objects_by_capture_ix[ix].1;
4222 let byte_range = capture.node.byte_range();
4223
4224 let mut found = false;
4225 for (range, existing) in captures.iter_mut() {
4226 if existing == &text_object {
4227 range.start = range.start.min(byte_range.start);
4228 range.end = range.end.max(byte_range.end);
4229 found = true;
4230 break;
4231 }
4232 }
4233
4234 if !found {
4235 captures.push((byte_range, text_object));
4236 }
4237 }
4238
4239 matches.advance();
4240 }
4241 })
4242 }
4243
4244 /// Returns enclosing bracket ranges containing the given range
4245 pub fn enclosing_bracket_ranges<T: ToOffset>(
4246 &self,
4247 range: Range<T>,
4248 ) -> impl Iterator<Item = BracketMatch> + '_ {
4249 let range = range.start.to_offset(self)..range.end.to_offset(self);
4250
4251 self.bracket_ranges(range.clone()).filter(move |pair| {
4252 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4253 })
4254 }
4255
4256    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains the range.
4257    ///
4258    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
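    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`
    /// and a cursor `offset`. The filter shown is a hypothetical one that ignores
    /// bracket pairs with nothing between them:
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     offset..offset,
    ///     Some(&|open: Range<usize>, close: Range<usize>| close.start > open.end),
    /// );
    /// if let Some((open_range, close_range)) = innermost {
    ///     let _ = (open_range, close_range);
    /// }
    /// ```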
4259 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4260 &self,
4261 range: Range<T>,
4262 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4263 ) -> Option<(Range<usize>, Range<usize>)> {
4264 let range = range.start.to_offset(self)..range.end.to_offset(self);
4265
4266 // Get the ranges of the innermost pair of brackets.
4267 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4268
4269 for pair in self.enclosing_bracket_ranges(range) {
4270 if let Some(range_filter) = range_filter
4271 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4272 {
4273 continue;
4274 }
4275
4276 let len = pair.close_range.end - pair.open_range.start;
4277
4278 if let Some((existing_open, existing_close)) = &result {
4279 let existing_len = existing_close.end - existing_open.start;
4280 if len > existing_len {
4281 continue;
4282 }
4283 }
4284
4285 result = Some((pair.open_range, pair.close_range));
4286 }
4287
4288 result
4289 }
4290
4291 /// Returns anchor ranges for any matches of the redaction query.
4292 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4293 /// will be run on the relevant section of the buffer.
4294 pub fn redacted_ranges<T: ToOffset>(
4295 &self,
4296 range: Range<T>,
4297 ) -> impl Iterator<Item = Range<usize>> + '_ {
4298 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4299 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4300 grammar
4301 .redactions_config
4302 .as_ref()
4303 .map(|config| &config.query)
4304 });
4305
4306 let configs = syntax_matches
4307 .grammars()
4308 .iter()
4309 .map(|grammar| grammar.redactions_config.as_ref())
4310 .collect::<Vec<_>>();
4311
4312 iter::from_fn(move || {
4313 let redacted_range = syntax_matches
4314 .peek()
4315 .and_then(|mat| {
4316 configs[mat.grammar_index].and_then(|config| {
4317 mat.captures
4318 .iter()
4319 .find(|capture| capture.index == config.redaction_capture_ix)
4320 })
4321 })
4322 .map(|mat| mat.node.byte_range());
4323 syntax_matches.advance();
4324 redacted_range
4325 })
4326 }
4327
4328 pub fn injections_intersecting_range<T: ToOffset>(
4329 &self,
4330 range: Range<T>,
4331 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4332 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4333
4334 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4335 grammar
4336 .injection_config
4337 .as_ref()
4338 .map(|config| &config.query)
4339 });
4340
4341 let configs = syntax_matches
4342 .grammars()
4343 .iter()
4344 .map(|grammar| grammar.injection_config.as_ref())
4345 .collect::<Vec<_>>();
4346
4347 iter::from_fn(move || {
4348 let ranges = syntax_matches.peek().and_then(|mat| {
4349 let config = &configs[mat.grammar_index]?;
4350 let content_capture_range = mat.captures.iter().find_map(|capture| {
4351 if capture.index == config.content_capture_ix {
4352 Some(capture.node.byte_range())
4353 } else {
4354 None
4355 }
4356 })?;
4357 let language = self.language_at(content_capture_range.start)?;
4358 Some((content_capture_range, language))
4359 });
4360 syntax_matches.advance();
4361 ranges
4362 })
4363 }
4364
4365 pub fn runnable_ranges(
4366 &self,
4367 offset_range: Range<usize>,
4368 ) -> impl Iterator<Item = RunnableRange> + '_ {
4369 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4370 grammar.runnable_config.as_ref().map(|config| &config.query)
4371 });
4372
4373 let test_configs = syntax_matches
4374 .grammars()
4375 .iter()
4376 .map(|grammar| grammar.runnable_config.as_ref())
4377 .collect::<Vec<_>>();
4378
4379 iter::from_fn(move || {
4380 loop {
4381 let mat = syntax_matches.peek()?;
4382
4383 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4384 let mut run_range = None;
4385 let full_range = mat.captures.iter().fold(
4386 Range {
4387 start: usize::MAX,
4388 end: 0,
4389 },
4390 |mut acc, next| {
4391 let byte_range = next.node.byte_range();
4392 if acc.start > byte_range.start {
4393 acc.start = byte_range.start;
4394 }
4395 if acc.end < byte_range.end {
4396 acc.end = byte_range.end;
4397 }
4398 acc
4399 },
4400 );
4401 if full_range.start > full_range.end {
4402 // We did not find a full spanning range of this match.
4403 return None;
4404 }
4405 let extra_captures: SmallVec<[_; 1]> =
4406 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4407 test_configs
4408 .extra_captures
4409 .get(capture.index as usize)
4410 .cloned()
4411 .and_then(|tag_name| match tag_name {
4412 RunnableCapture::Named(name) => {
4413 Some((capture.node.byte_range(), name))
4414 }
4415 RunnableCapture::Run => {
4416 let _ = run_range.insert(capture.node.byte_range());
4417 None
4418 }
4419 })
4420 }));
4421 let run_range = run_range?;
4422 let tags = test_configs
4423 .query
4424 .property_settings(mat.pattern_index)
4425 .iter()
4426 .filter_map(|property| {
4427 if *property.key == *"tag" {
4428 property
4429 .value
4430 .as_ref()
4431 .map(|value| RunnableTag(value.to_string().into()))
4432 } else {
4433 None
4434 }
4435 })
4436 .collect();
4437 let extra_captures = extra_captures
4438 .into_iter()
4439 .map(|(range, name)| {
4440 (
4441 name.to_string(),
4442 self.text_for_range(range).collect::<String>(),
4443 )
4444 })
4445 .collect();
4446 // All tags should have the same range.
4447 Some(RunnableRange {
4448 run_range,
4449 full_range,
4450 runnable: Runnable {
4451 tags,
4452 language: mat.language,
4453 buffer: self.remote_id(),
4454 },
4455 extra_captures,
4456 buffer_id: self.remote_id(),
4457 })
4458 });
4459
4460 syntax_matches.advance();
4461 if test_range.is_some() {
4462                    // It's fine to short-circuit when .peek()? returns None. If a match did not contain
4463                    // a run marker we don't want to end this iterator, so we just loop around to the next match.
4464 return test_range;
4465 }
4466 }
4467 })
4468 }
4469
4470    /// Returns selections for remote peers intersecting the given range. If `include_local` is true, the local replica's selections are included as well.
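    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`
    /// and an anchor range `range`:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range.clone(), false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `Selection<Anchor>` belonging to `replica_id`.
    ///         let _ = (replica_id, line_mode, cursor_shape, &selection.start);
    ///     }
    /// }
    /// ```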
4471 #[allow(clippy::type_complexity)]
4472 pub fn selections_in_range(
4473 &self,
4474 range: Range<Anchor>,
4475 include_local: bool,
4476 ) -> impl Iterator<
4477 Item = (
4478 ReplicaId,
4479 bool,
4480 CursorShape,
4481 impl Iterator<Item = &Selection<Anchor>> + '_,
4482 ),
4483 > + '_ {
4484 self.remote_selections
4485 .iter()
4486 .filter(move |(replica_id, set)| {
4487 (include_local || **replica_id != self.text.replica_id())
4488 && !set.selections.is_empty()
4489 })
4490 .map(move |(replica_id, set)| {
4491 let start_ix = match set.selections.binary_search_by(|probe| {
4492 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4493 }) {
4494 Ok(ix) | Err(ix) => ix,
4495 };
4496 let end_ix = match set.selections.binary_search_by(|probe| {
4497 probe.start.cmp(&range.end, self).then(Ordering::Less)
4498 }) {
4499 Ok(ix) | Err(ix) => ix,
4500 };
4501
4502 (
4503 *replica_id,
4504 set.line_mode,
4505 set.cursor_shape,
4506 set.selections[start_ix..end_ix].iter(),
4507 )
4508 })
4509 }
4510
4511    /// Returns whether the buffer contains any diagnostics.
4512 pub fn has_diagnostics(&self) -> bool {
4513 !self.diagnostics.is_empty()
4514 }
4515
4516 /// Returns all the diagnostics intersecting the given range.
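    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// // Resolve diagnostic ranges to byte offsets, in forward order.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     let _ = (entry.range.clone(), entry.diagnostic.severity);
    /// }
    /// ```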
4517 pub fn diagnostics_in_range<'a, T, O>(
4518 &'a self,
4519 search_range: Range<T>,
4520 reversed: bool,
4521 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4522 where
4523 T: 'a + Clone + ToOffset,
4524 O: 'a + FromAnchor,
4525 {
4526 let mut iterators: Vec<_> = self
4527 .diagnostics
4528 .iter()
4529 .map(|(_, collection)| {
4530 collection
4531 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4532 .peekable()
4533 })
4534 .collect();
4535
4536 std::iter::from_fn(move || {
4537 let (next_ix, _) = iterators
4538 .iter_mut()
4539 .enumerate()
4540 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4541 .min_by(|(_, a), (_, b)| {
4542 let cmp = a
4543 .range
4544 .start
4545 .cmp(&b.range.start, self)
4546 // when range is equal, sort by diagnostic severity
4547 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4548 // and stabilize order with group_id
4549 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4550 if reversed { cmp.reverse() } else { cmp }
4551 })?;
4552 iterators[next_ix]
4553 .next()
4554 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4555 diagnostic,
4556 range: FromAnchor::from_anchor(&range.start, self)
4557 ..FromAnchor::from_anchor(&range.end, self),
4558 })
4559 })
4560 }
4561
4562 /// Returns all the diagnostic groups associated with the given
4563 /// language server ID. If no language server ID is provided,
4564 /// all diagnostics groups are returned.
4565 pub fn diagnostic_groups(
4566 &self,
4567 language_server_id: Option<LanguageServerId>,
4568 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4569 let mut groups = Vec::new();
4570
4571 if let Some(language_server_id) = language_server_id {
4572 if let Ok(ix) = self
4573 .diagnostics
4574 .binary_search_by_key(&language_server_id, |e| e.0)
4575 {
4576 self.diagnostics[ix]
4577 .1
4578 .groups(language_server_id, &mut groups, self);
4579 }
4580 } else {
4581 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4582 diagnostics.groups(*language_server_id, &mut groups, self);
4583 }
4584 }
4585
4586 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4587 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4588 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4589 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4590 });
4591
4592 groups
4593 }
4594
4595 /// Returns an iterator over the diagnostics for the given group.
4596 pub fn diagnostic_group<O>(
4597 &self,
4598 group_id: usize,
4599 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4600 where
4601 O: FromAnchor + 'static,
4602 {
4603 self.diagnostics
4604 .iter()
4605 .flat_map(move |(_, set)| set.group(group_id, self))
4606 }
4607
4608 /// An integer version number that accounts for all updates besides
4609 /// the buffer's text itself (which is versioned via a version vector).
4610 pub fn non_text_state_update_count(&self) -> usize {
4611 self.non_text_state_update_count
4612 }
4613
4614 /// An integer version that changes when the buffer's syntax changes.
4615 pub fn syntax_update_count(&self) -> usize {
4616 self.syntax.update_count()
4617 }
4618
4619    /// Returns a snapshot of the underlying file.
4620 pub fn file(&self) -> Option<&Arc<dyn File>> {
4621 self.file.as_ref()
4622 }
4623
4624 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4625 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4626 if let Some(file) = self.file() {
4627 if file.path().file_name().is_none() || include_root {
4628 Some(file.full_path(cx))
4629 } else {
4630 Some(file.path().to_path_buf())
4631 }
4632 } else {
4633 None
4634 }
4635 }
4636
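    /// Collects the distinct words in the given range, keyed by their text and
    /// mapped to their anchor ranges, optionally filtered by a fuzzy query (see
    /// [`WordsQuery`]).
    ///
    /// Illustrative sketch (not compiled here); assumes a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     // Only keep words containing the characters `f` and `o`, in order.
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```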
4637 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4638 let query_str = query.fuzzy_contents;
4639 if query_str.is_some_and(|query| query.is_empty()) {
4640 return BTreeMap::default();
4641 }
4642
4643 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4644 language,
4645 override_id: None,
4646 }));
4647
4648 let mut query_ix = 0;
4649 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4650 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4651
4652 let mut words = BTreeMap::default();
4653 let mut current_word_start_ix = None;
4654 let mut chunk_ix = query.range.start;
4655 for chunk in self.chunks(query.range, false) {
4656 for (i, c) in chunk.text.char_indices() {
4657 let ix = chunk_ix + i;
4658 if classifier.is_word(c) {
4659 if current_word_start_ix.is_none() {
4660 current_word_start_ix = Some(ix);
4661 }
4662
4663 if let Some(query_chars) = &query_chars
4664 && query_ix < query_len
4665 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4666 {
4667 query_ix += 1;
4668 }
4669 continue;
4670 } else if let Some(word_start) = current_word_start_ix.take()
4671 && query_ix == query_len
4672 {
4673 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4674 let mut word_text = self.text_for_range(word_start..ix).peekable();
4675 let first_char = word_text
4676 .peek()
4677 .and_then(|first_chunk| first_chunk.chars().next());
4678                    // Skip empty words and "words" starting with digits, as a heuristic to reduce useless completions
4679 if !query.skip_digits
4680 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4681 {
4682 words.insert(word_text.collect(), word_range);
4683 }
4684 }
4685 query_ix = 0;
4686 }
4687 chunk_ix += chunk.text.len();
4688 }
4689
4690 words
4691 }
4692}
4693
4694pub struct WordsQuery<'a> {
4695    /// Only return words that contain all of the characters of this fuzzy string, in order (matched case-insensitively).
4696 pub fuzzy_contents: Option<&'a str>,
4697 /// Skips words that start with a digit.
4698 pub skip_digits: bool,
4699    /// The buffer offset range in which to look for words.
4700 pub range: Range<usize>,
4701}
4702
4703fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4704 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4705}
4706
4707fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4708 let mut result = IndentSize::spaces(0);
4709 for c in text {
4710 let kind = match c {
4711 ' ' => IndentKind::Space,
4712 '\t' => IndentKind::Tab,
4713 _ => break,
4714 };
4715 if result.len == 0 {
4716 result.kind = kind;
4717 }
4718 result.len += 1;
4719 }
4720 result
4721}
4722
4723impl Clone for BufferSnapshot {
4724 fn clone(&self) -> Self {
4725 Self {
4726 text: self.text.clone(),
4727 syntax: self.syntax.clone(),
4728 file: self.file.clone(),
4729 remote_selections: self.remote_selections.clone(),
4730 diagnostics: self.diagnostics.clone(),
4731 language: self.language.clone(),
4732 non_text_state_update_count: self.non_text_state_update_count,
4733 }
4734 }
4735}
4736
4737impl Deref for BufferSnapshot {
4738 type Target = text::BufferSnapshot;
4739
4740 fn deref(&self) -> &Self::Target {
4741 &self.text
4742 }
4743}
4744
4745unsafe impl Send for BufferChunks<'_> {}
4746
4747impl<'a> BufferChunks<'a> {
4748 pub(crate) fn new(
4749 text: &'a Rope,
4750 range: Range<usize>,
4751 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4752 diagnostics: bool,
4753 buffer_snapshot: Option<&'a BufferSnapshot>,
4754 ) -> Self {
4755 let mut highlights = None;
4756 if let Some((captures, highlight_maps)) = syntax {
4757 highlights = Some(BufferChunkHighlights {
4758 captures,
4759 next_capture: None,
4760 stack: Default::default(),
4761 highlight_maps,
4762 })
4763 }
4764
4765 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4766 let chunks = text.chunks_in_range(range.clone());
4767
4768 let mut this = BufferChunks {
4769 range,
4770 buffer_snapshot,
4771 chunks,
4772 diagnostic_endpoints,
4773 error_depth: 0,
4774 warning_depth: 0,
4775 information_depth: 0,
4776 hint_depth: 0,
4777 unnecessary_depth: 0,
4778 underline: true,
4779 highlights,
4780 };
4781 this.initialize_diagnostic_endpoints();
4782 this
4783 }
4784
4785    /// Seeks to the given byte range in the buffer.
4786 pub fn seek(&mut self, range: Range<usize>) {
4787 let old_range = std::mem::replace(&mut self.range, range.clone());
4788 self.chunks.set_range(self.range.clone());
4789 if let Some(highlights) = self.highlights.as_mut() {
4790 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4791 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4792 highlights
4793 .stack
4794 .retain(|(end_offset, _)| *end_offset > range.start);
4795 if let Some(capture) = &highlights.next_capture
4796 && range.start >= capture.node.start_byte()
4797 {
4798 let next_capture_end = capture.node.end_byte();
4799 if range.start < next_capture_end {
4800 highlights.stack.push((
4801 next_capture_end,
4802 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4803 ));
4804 }
4805 highlights.next_capture.take();
4806 }
4807 } else if let Some(snapshot) = self.buffer_snapshot {
4808 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4809 *highlights = BufferChunkHighlights {
4810 captures,
4811 next_capture: None,
4812 stack: Default::default(),
4813 highlight_maps,
4814 };
4815 } else {
4816 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4817 // Seeking such BufferChunks is not supported.
4818 debug_assert!(
4819 false,
4820 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4821 );
4822 }
4823
4824 highlights.captures.set_byte_range(self.range.clone());
4825 self.initialize_diagnostic_endpoints();
4826 }
4827 }
4828
4829 fn initialize_diagnostic_endpoints(&mut self) {
4830 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4831 && let Some(buffer) = self.buffer_snapshot
4832 {
4833 let mut diagnostic_endpoints = Vec::new();
4834 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4835 diagnostic_endpoints.push(DiagnosticEndpoint {
4836 offset: entry.range.start,
4837 is_start: true,
4838 severity: entry.diagnostic.severity,
4839 is_unnecessary: entry.diagnostic.is_unnecessary,
4840 underline: entry.diagnostic.underline,
4841 });
4842 diagnostic_endpoints.push(DiagnosticEndpoint {
4843 offset: entry.range.end,
4844 is_start: false,
4845 severity: entry.diagnostic.severity,
4846 is_unnecessary: entry.diagnostic.is_unnecessary,
4847 underline: entry.diagnostic.underline,
4848 });
4849 }
4850 diagnostic_endpoints
4851 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4852 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4853 self.hint_depth = 0;
4854 self.error_depth = 0;
4855 self.warning_depth = 0;
4856 self.information_depth = 0;
4857 }
4858 }
4859
4860 /// The current byte offset in the buffer.
4861 pub fn offset(&self) -> usize {
4862 self.range.start
4863 }
4864
4865 pub fn range(&self) -> Range<usize> {
4866 self.range.clone()
4867 }
4868
4869 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4870 let depth = match endpoint.severity {
4871 DiagnosticSeverity::ERROR => &mut self.error_depth,
4872 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4873 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4874 DiagnosticSeverity::HINT => &mut self.hint_depth,
4875 _ => return,
4876 };
4877 if endpoint.is_start {
4878 *depth += 1;
4879 } else {
4880 *depth -= 1;
4881 }
4882
4883 if endpoint.is_unnecessary {
4884 if endpoint.is_start {
4885 self.unnecessary_depth += 1;
4886 } else {
4887 self.unnecessary_depth -= 1;
4888 }
4889 }
4890 }
4891
4892 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4893 if self.error_depth > 0 {
4894 Some(DiagnosticSeverity::ERROR)
4895 } else if self.warning_depth > 0 {
4896 Some(DiagnosticSeverity::WARNING)
4897 } else if self.information_depth > 0 {
4898 Some(DiagnosticSeverity::INFORMATION)
4899 } else if self.hint_depth > 0 {
4900 Some(DiagnosticSeverity::HINT)
4901 } else {
4902 None
4903 }
4904 }
4905
4906 fn current_code_is_unnecessary(&self) -> bool {
4907 self.unnecessary_depth > 0
4908 }
4909}
4910
4911impl<'a> Iterator for BufferChunks<'a> {
4912 type Item = Chunk<'a>;
4913
4914 fn next(&mut self) -> Option<Self::Item> {
4915 let mut next_capture_start = usize::MAX;
4916 let mut next_diagnostic_endpoint = usize::MAX;
4917
4918 if let Some(highlights) = self.highlights.as_mut() {
4919 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4920 if *parent_capture_end <= self.range.start {
4921 highlights.stack.pop();
4922 } else {
4923 break;
4924 }
4925 }
4926
4927 if highlights.next_capture.is_none() {
4928 highlights.next_capture = highlights.captures.next();
4929 }
4930
4931 while let Some(capture) = highlights.next_capture.as_ref() {
4932 if self.range.start < capture.node.start_byte() {
4933 next_capture_start = capture.node.start_byte();
4934 break;
4935 } else {
4936 let highlight_id =
4937 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4938 highlights
4939 .stack
4940 .push((capture.node.end_byte(), highlight_id));
4941 highlights.next_capture = highlights.captures.next();
4942 }
4943 }
4944 }
4945
4946 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4947 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4948 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4949 if endpoint.offset <= self.range.start {
4950 self.update_diagnostic_depths(endpoint);
4951 diagnostic_endpoints.next();
4952 self.underline = endpoint.underline;
4953 } else {
4954 next_diagnostic_endpoint = endpoint.offset;
4955 break;
4956 }
4957 }
4958 }
4959 self.diagnostic_endpoints = diagnostic_endpoints;
4960
4961 if let Some(ChunkBitmaps {
4962 text: chunk,
4963 chars: chars_map,
4964 tabs,
4965 }) = self.chunks.peek_tabs()
4966 {
4967 let chunk_start = self.range.start;
4968 let mut chunk_end = (self.chunks.offset() + chunk.len())
4969 .min(next_capture_start)
4970 .min(next_diagnostic_endpoint);
4971 let mut highlight_id = None;
4972 if let Some(highlights) = self.highlights.as_ref()
4973 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4974 {
4975 chunk_end = chunk_end.min(*parent_capture_end);
4976 highlight_id = Some(*parent_highlight_id);
4977 }
4978
4979 let slice =
4980 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4981 let bit_end = chunk_end - self.chunks.offset();
4982
4983 let mask = if bit_end >= 128 {
4984 u128::MAX
4985 } else {
4986 (1u128 << bit_end) - 1
4987 };
4988 let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask;
4989 let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask;
4990
4991 self.range.start = chunk_end;
4992 if self.range.start == self.chunks.offset() + chunk.len() {
4993 self.chunks.next().unwrap();
4994 }
4995
4996 Some(Chunk {
4997 text: slice,
4998 syntax_highlight_id: highlight_id,
4999 underline: self.underline,
5000 diagnostic_severity: self.current_diagnostic_severity(),
5001 is_unnecessary: self.current_code_is_unnecessary(),
5002 tabs,
5003 chars: chars_map,
5004 ..Chunk::default()
5005 })
5006 } else {
5007 None
5008 }
5009 }
5010}
5011
5012impl operation_queue::Operation for Operation {
5013 fn lamport_timestamp(&self) -> clock::Lamport {
5014 match self {
5015 Operation::Buffer(_) => {
5016 unreachable!("buffer operations should never be deferred at this layer")
5017 }
5018 Operation::UpdateDiagnostics {
5019 lamport_timestamp, ..
5020 }
5021 | Operation::UpdateSelections {
5022 lamport_timestamp, ..
5023 }
5024 | Operation::UpdateCompletionTriggers {
5025 lamport_timestamp, ..
5026 }
5027 | Operation::UpdateLineEnding {
5028 lamport_timestamp, ..
5029 } => *lamport_timestamp,
5030 }
5031 }
5032}
5033
5034impl Default for Diagnostic {
5035 fn default() -> Self {
5036 Self {
5037 source: Default::default(),
5038 source_kind: DiagnosticSourceKind::Other,
5039 code: None,
5040 code_description: None,
5041 severity: DiagnosticSeverity::ERROR,
5042 message: Default::default(),
5043 markdown: None,
5044 group_id: 0,
5045 is_primary: false,
5046 is_disk_based: false,
5047 is_unnecessary: false,
5048 underline: true,
5049 data: None,
5050 }
5051 }
5052}
5053
5054impl IndentSize {
5055    /// Returns an [`IndentSize`] representing the given number of spaces.
5056 pub fn spaces(len: u32) -> Self {
5057 Self {
5058 len,
5059 kind: IndentKind::Space,
5060 }
5061 }
5062
5063 /// Returns an [`IndentSize`] representing a tab.
5064 pub fn tab() -> Self {
5065 Self {
5066 len: 1,
5067 kind: IndentKind::Tab,
5068 }
5069 }
5070
5071 /// An iterator over the characters represented by this [`IndentSize`].
5072 pub fn chars(&self) -> impl Iterator<Item = char> {
5073 iter::repeat(self.char()).take(self.len as usize)
5074 }
5075
5076 /// The character representation of this [`IndentSize`].
5077 pub fn char(&self) -> char {
5078 match self.kind {
5079 IndentKind::Space => ' ',
5080 IndentKind::Tab => '\t',
5081 }
5082 }
5083
5084 /// Consumes the current [`IndentSize`] and returns a new one that has
5085 /// been shrunk or enlarged by the given size along the given direction.
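    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// // Growing by an indent of the same kind adds the lengths.
    /// assert_eq!(
    ///     IndentSize::spaces(4)
    ///         .with_delta(Ordering::Greater, IndentSize::spaces(4))
    ///         .len,
    ///     8
    /// );
    /// // Shrinking by a larger indent of the same kind leaves the size unchanged.
    /// assert_eq!(
    ///     IndentSize::spaces(4)
    ///         .with_delta(Ordering::Less, IndentSize::spaces(8))
    ///         .len,
    ///     4
    /// );
    /// ```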
5086 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5087 match direction {
5088 Ordering::Less => {
5089 if self.kind == size.kind && self.len >= size.len {
5090 self.len -= size.len;
5091 }
5092 }
5093 Ordering::Equal => {}
5094 Ordering::Greater => {
5095 if self.len == 0 {
5096 self = size;
5097 } else if self.kind == size.kind {
5098 self.len += size.len;
5099 }
5100 }
5101 }
5102 self
5103 }
5104
5105 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5106 match self.kind {
5107 IndentKind::Space => self.len as usize,
5108 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5109 }
5110 }
5111}
5112
5113#[cfg(any(test, feature = "test-support"))]
5114pub struct TestFile {
5115 pub path: Arc<Path>,
5116 pub root_name: String,
5117 pub local_root: Option<PathBuf>,
5118}
5119
5120#[cfg(any(test, feature = "test-support"))]
5121impl File for TestFile {
5122 fn path(&self) -> &Arc<Path> {
5123 &self.path
5124 }
5125
5126 fn full_path(&self, _: &gpui::App) -> PathBuf {
5127 PathBuf::from(&self.root_name).join(self.path.as_ref())
5128 }
5129
5130 fn as_local(&self) -> Option<&dyn LocalFile> {
5131 if self.local_root.is_some() {
5132 Some(self)
5133 } else {
5134 None
5135 }
5136 }
5137
5138 fn disk_state(&self) -> DiskState {
5139 unimplemented!()
5140 }
5141
5142 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
5143 self.path().file_name().unwrap_or(self.root_name.as_ref())
5144 }
5145
5146 fn worktree_id(&self, _: &App) -> WorktreeId {
5147 WorktreeId::from_usize(0)
5148 }
5149
5150 fn to_proto(&self, _: &App) -> rpc::proto::File {
5151 unimplemented!()
5152 }
5153
5154 fn is_private(&self) -> bool {
5155 false
5156 }
5157}
5158
5159#[cfg(any(test, feature = "test-support"))]
5160impl LocalFile for TestFile {
5161 fn abs_path(&self, _cx: &App) -> PathBuf {
5162 PathBuf::from(self.local_root.as_ref().unwrap())
5163 .join(&self.root_name)
5164 .join(self.path.as_ref())
5165 }
5166
5167 fn load(&self, _cx: &App) -> Task<Result<String>> {
5168 unimplemented!()
5169 }
5170
5171 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5172 unimplemented!()
5173 }
5174}
5175
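/// Groups an ascending sequence of `u32` values into contiguous ranges, starting
/// a new range whenever a gap is encountered or the current range reaches `max_len`.
///
/// Illustrative sketch (not compiled here):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```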
5176pub(crate) fn contiguous_ranges(
5177 values: impl Iterator<Item = u32>,
5178 max_len: usize,
5179) -> impl Iterator<Item = Range<u32>> {
5180 let mut values = values;
5181 let mut current_range: Option<Range<u32>> = None;
5182 std::iter::from_fn(move || {
5183 loop {
5184 if let Some(value) = values.next() {
5185 if let Some(range) = &mut current_range
5186 && value == range.end
5187 && range.len() < max_len
5188 {
5189 range.end += 1;
5190 continue;
5191 }
5192
5193 let prev_range = current_range.clone();
5194 current_range = Some(value..(value + 1));
5195 if prev_range.is_some() {
5196 return prev_range;
5197 }
5198 } else {
5199 return current_range.take();
5200 }
5201 }
5202 })
5203}
5204
5205#[derive(Default, Debug)]
5206pub struct CharClassifier {
5207 scope: Option<LanguageScope>,
5208 for_completion: bool,
5209 ignore_punctuation: bool,
5210}
5211
5212impl CharClassifier {
5213 pub fn new(scope: Option<LanguageScope>) -> Self {
5214 Self {
5215 scope,
5216 for_completion: false,
5217 ignore_punctuation: false,
5218 }
5219 }
5220
5221 pub fn for_completion(self, for_completion: bool) -> Self {
5222 Self {
5223 for_completion,
5224 ..self
5225 }
5226 }
5227
5228 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5229 Self {
5230 ignore_punctuation,
5231 ..self
5232 }
5233 }
5234
5235 pub fn is_whitespace(&self, c: char) -> bool {
5236 self.kind(c) == CharKind::Whitespace
5237 }
5238
5239 pub fn is_word(&self, c: char) -> bool {
5240 self.kind(c) == CharKind::Word
5241 }
5242
5243 pub fn is_punctuation(&self, c: char) -> bool {
5244 self.kind(c) == CharKind::Punctuation
5245 }
5246
5247 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5248 if c.is_alphanumeric() || c == '_' {
5249 return CharKind::Word;
5250 }
5251
5252 if let Some(scope) = &self.scope {
5253 let characters = if self.for_completion {
5254 scope.completion_query_characters()
5255 } else {
5256 scope.word_characters()
5257 };
5258 if let Some(characters) = characters
5259 && characters.contains(&c)
5260 {
5261 return CharKind::Word;
5262 }
5263 }
5264
5265 if c.is_whitespace() {
5266 return CharKind::Whitespace;
5267 }
5268
5269 if ignore_punctuation {
5270 CharKind::Word
5271 } else {
5272 CharKind::Punctuation
5273 }
5274 }
5275
5276 pub fn kind(&self, c: char) -> CharKind {
5277 self.kind_with(c, self.ignore_punctuation)
5278 }
5279}
5280
5281/// Find all of the ranges of whitespace that occur at the ends of lines
5282/// in the given rope.
5283///
5284/// This could also be done with a regex search, but this implementation
5285/// avoids copying text.
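///
/// Illustrative sketch (not compiled here); assumes `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // The trailing spaces after "a" and the trailing tab after "b".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```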
5286pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5287 let mut ranges = Vec::new();
5288
5289 let mut offset = 0;
5290 let mut prev_chunk_trailing_whitespace_range = 0..0;
5291 for chunk in rope.chunks() {
5292 let mut prev_line_trailing_whitespace_range = 0..0;
5293 for (i, line) in chunk.split('\n').enumerate() {
5294 let line_end_offset = offset + line.len();
5295 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5296 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5297
5298 if i == 0 && trimmed_line_len == 0 {
5299 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5300 }
5301 if !prev_line_trailing_whitespace_range.is_empty() {
5302 ranges.push(prev_line_trailing_whitespace_range);
5303 }
5304
5305 offset = line_end_offset + 1;
5306 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5307 }
5308
5309 offset -= 1;
5310 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5311 }
5312
5313 if !prev_chunk_trailing_whitespace_range.is_empty() {
5314 ranges.push(prev_chunk_trailing_whitespace_range);
5315 }
5316
5317 ranges
5318}