1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::{SettingsUi, WorktreeId};
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(
177 Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
178)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underline,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<NumberOrString>,
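    /// A URL pointing to documentation that describes this diagnostic's code,
    /// if the source provided one.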
207 pub code_description: Option<lsp::Uri>,
208 /// Whether this diagnostic is a hint, warning, or error.
209 pub severity: DiagnosticSeverity,
210 /// The human-readable message associated with this diagnostic.
211 pub message: String,
    /// The human-readable message, in Markdown format, if available.
213 pub markdown: Option<String>,
214 /// An id that identifies the group to which this diagnostic belongs.
215 ///
216 /// When a language server produces a diagnostic with
217 /// one or more associated diagnostics, those diagnostics are all
218 /// assigned a single group ID.
219 pub group_id: usize,
220 /// Whether this diagnostic is the primary diagnostic for its group.
221 ///
222 /// In a given group, the primary diagnostic is the top-level diagnostic
223 /// returned by the language server. The non-primary diagnostics are the
224 /// associated diagnostics.
225 pub is_primary: bool,
226 /// Whether this diagnostic is considered to originate from an analysis of
227 /// files on disk, as opposed to any unsaved buffer contents. This is a
228 /// property of a given diagnostic source, and is configured for a given
229 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
230 /// for the language server.
231 pub is_disk_based: bool,
232 /// Whether this diagnostic marks unnecessary code.
233 pub is_unnecessary: bool,
    /// The kind of source this diagnostic came from, used to quickly distinguish groups of diagnostics by origin.
235 pub source_kind: DiagnosticSourceKind,
236 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
237 pub data: Option<Value>,
238 /// Whether to underline the corresponding text range in the editor.
239 pub underline: bool,
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
243pub enum DiagnosticSourceKind {
244 Pulled,
245 Pushed,
246 Other,
247}
248
249/// An operation used to synchronize this buffer with its other replicas.
250#[derive(Clone, Debug, PartialEq)]
251pub enum Operation {
252 /// A text operation.
253 Buffer(text::Operation),
254
255 /// An update to the buffer's diagnostics.
256 UpdateDiagnostics {
257 /// The id of the language server that produced the new diagnostics.
258 server_id: LanguageServerId,
259 /// The diagnostics.
260 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
261 /// The buffer's lamport timestamp.
262 lamport_timestamp: clock::Lamport,
263 },
264
265 /// An update to the most recent selections in this buffer.
266 UpdateSelections {
267 /// The selections.
268 selections: Arc<[Selection<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// Whether the selections are in 'line mode'.
272 line_mode: bool,
273 /// The [`CursorShape`] associated with these selections.
274 cursor_shape: CursorShape,
275 },
276
277 /// An update to the characters that should trigger autocompletion
278 /// for this buffer.
279 UpdateCompletionTriggers {
280 /// The characters that trigger autocompletion.
281 triggers: Vec<String>,
282 /// The buffer's lamport timestamp.
283 lamport_timestamp: clock::Lamport,
284 /// The language server ID.
285 server_id: LanguageServerId,
286 },
287
288 /// An update to the line ending type of this buffer.
289 UpdateLineEnding {
290 /// The line ending type.
291 line_ending: LineEnding,
292 /// The buffer's lamport timestamp.
293 lamport_timestamp: clock::Lamport,
294 },
295}
296
297/// An event that occurs in a buffer.
298#[derive(Clone, Debug, PartialEq)]
299pub enum BufferEvent {
300 /// The buffer was changed in a way that must be
301 /// propagated to its other replicas.
302 Operation {
303 operation: Operation,
304 is_local: bool,
305 },
306 /// The buffer was edited.
307 Edited,
308 /// The buffer's `dirty` bit changed.
309 DirtyChanged,
310 /// The buffer was saved.
311 Saved,
312 /// The buffer's file was changed on disk.
313 FileHandleChanged,
314 /// The buffer was reloaded.
315 Reloaded,
    /// The buffer needs to be reloaded.
317 ReloadNeeded,
318 /// The buffer's language was changed.
319 LanguageChanged,
320 /// The buffer's syntax trees were updated.
321 Reparsed,
322 /// The buffer's diagnostics were updated.
323 DiagnosticsUpdated,
324 /// The buffer gained or lost editing capabilities.
325 CapabilityChanged,
326}
327
328/// The file associated with a buffer.
329pub trait File: Send + Sync + Any {
330 /// Returns the [`LocalFile`] associated with this file, if the
331 /// file is local.
332 fn as_local(&self) -> Option<&dyn LocalFile>;
333
334 /// Returns whether this file is local.
335 fn is_local(&self) -> bool {
336 self.as_local().is_some()
337 }
338
339 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
340 /// only available in some states, such as modification time.
341 fn disk_state(&self) -> DiskState;
342
343 /// Returns the path of this file relative to the worktree's root directory.
344 fn path(&self) -> &Arc<Path>;
345
346 /// Returns the path of this file relative to the worktree's parent directory (this means it
347 /// includes the name of the worktree's root folder).
348 fn full_path(&self, cx: &App) -> PathBuf;
349
350 /// Returns the last component of this handle's absolute path. If this handle refers to the root
351 /// of its worktree, then this method will return the name of the worktree itself.
352 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
353
354 /// Returns the id of the worktree to which this file belongs.
355 ///
356 /// This is needed for looking up project-specific settings.
357 fn worktree_id(&self, cx: &App) -> WorktreeId;
358
359 /// Converts this file into a protobuf message.
360 fn to_proto(&self, cx: &App) -> rpc::proto::File;
361
    /// Returns whether Zed considers this to be a private file.
363 fn is_private(&self) -> bool;
364}
365
366/// The file's storage status - whether it's stored (`Present`), and if so when it was last
367/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
368/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
369/// indicator for new files.
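///
/// An illustrative summary of how these states map onto [`DiskState::mtime`]
/// and [`DiskState::exists`] (see the methods below):
///
/// ```text
/// New               -> exists() == false, mtime() == None
/// Present { mtime } -> exists() == true,  mtime() == Some(mtime)
/// Deleted           -> exists() == false, mtime() == None
/// ```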
370#[derive(Copy, Clone, Debug, PartialEq)]
371pub enum DiskState {
372 /// File created in Zed that has not been saved.
373 New,
374 /// File present on the filesystem.
375 Present { mtime: MTime },
376 /// Deleted file that was previously present.
377 Deleted,
378}
379
380impl DiskState {
381 /// Returns the file's last known modification time on disk.
382 pub fn mtime(self) -> Option<MTime> {
383 match self {
384 DiskState::New => None,
385 DiskState::Present { mtime } => Some(mtime),
386 DiskState::Deleted => None,
387 }
388 }
389
390 pub fn exists(&self) -> bool {
391 match self {
392 DiskState::New => false,
393 DiskState::Present { .. } => true,
394 DiskState::Deleted => false,
395 }
396 }
397}
398
399/// The file associated with a buffer, in the case where the file is on the local disk.
400pub trait LocalFile: File {
    /// Returns the absolute path of this file.
402 fn abs_path(&self, cx: &App) -> PathBuf;
403
404 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
405 fn load(&self, cx: &App) -> Task<Result<String>>;
406
407 /// Loads the file's contents from disk.
408 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
409}
410
411/// The auto-indent behavior associated with an editing operation.
412/// For some editing operations, each affected line of text has its
413/// indentation recomputed. For other operations, the entire block
414/// of edited text is adjusted uniformly.
415#[derive(Clone, Debug)]
416pub enum AutoindentMode {
417 /// Indent each line of inserted text.
418 EachLine,
419 /// Apply the same indentation adjustment to all of the lines
420 /// in a given insertion.
421 Block {
422 /// The original indentation column of the first line of each
423 /// insertion, if it has been copied.
424 ///
425 /// Knowing this makes it possible to preserve the relative indentation
426 /// of every line in the insertion from when it was copied.
427 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other
        /// line of the insertion is shifted by `b - a` columns.
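        ///
        /// A worked example (illustrative only): if text was copied from
        /// original indent column 4 and the first line of the insertion is
        /// auto-indented to column 8, the adjustment is `8 - 4 = 4`, so a
        /// copied line that was at column 6 ends up at column 10.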
431 original_indent_columns: Vec<Option<u32>>,
432 },
433}
434
435#[derive(Clone)]
436struct AutoindentRequest {
437 before_edit: BufferSnapshot,
438 entries: Vec<AutoindentRequestEntry>,
439 is_block_mode: bool,
440 ignore_empty_lines: bool,
441}
442
443#[derive(Debug, Clone)]
444struct AutoindentRequestEntry {
445 /// A range of the buffer whose indentation should be adjusted.
446 range: Range<Anchor>,
447 /// Whether or not these lines should be considered brand new, for the
448 /// purpose of auto-indent. When text is not new, its indentation will
449 /// only be adjusted if the suggested indentation level has *changed*
450 /// since the edit was made.
451 first_line_is_new: bool,
452 indent_size: IndentSize,
453 original_indent_column: Option<u32>,
454}
455
456#[derive(Debug)]
457struct IndentSuggestion {
458 basis_row: u32,
459 delta: Ordering,
460 within_error: bool,
461}
462
463struct BufferChunkHighlights<'a> {
464 captures: SyntaxMapCaptures<'a>,
465 next_capture: Option<SyntaxMapCapture<'a>>,
466 stack: Vec<(usize, HighlightId)>,
467 highlight_maps: Vec<HighlightMap>,
468}
469
470/// An iterator that yields chunks of a buffer's text, along with their
471/// syntax highlights and diagnostic status.
472pub struct BufferChunks<'a> {
473 buffer_snapshot: Option<&'a BufferSnapshot>,
474 range: Range<usize>,
475 chunks: text::Chunks<'a>,
476 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
477 error_depth: usize,
478 warning_depth: usize,
479 information_depth: usize,
480 hint_depth: usize,
481 unnecessary_depth: usize,
482 underline: bool,
483 highlights: Option<BufferChunkHighlights<'a>>,
484}
485
486/// A chunk of a buffer's text, along with its syntax highlight and
487/// diagnostic status.
488#[derive(Clone, Debug, Default)]
489pub struct Chunk<'a> {
490 /// The text of the chunk.
491 pub text: &'a str,
492 /// The syntax highlighting style of the chunk.
493 pub syntax_highlight_id: Option<HighlightId>,
494 /// The highlight style that has been applied to this chunk in
495 /// the editor.
496 pub highlight_style: Option<HighlightStyle>,
497 /// The severity of diagnostic associated with this chunk, if any.
498 pub diagnostic_severity: Option<DiagnosticSeverity>,
499 /// Whether this chunk of text is marked as unnecessary.
500 pub is_unnecessary: bool,
501 /// Whether this chunk of text was originally a tab character.
502 pub is_tab: bool,
    /// Whether this chunk of text is an inlay (virtual text inserted by the
    /// editor rather than stored in the buffer).
504 pub is_inlay: bool,
505 /// Whether to underline the corresponding text range in the editor.
506 pub underline: bool,
507}
508
509/// A set of edits to a given version of a buffer, computed asynchronously.
510#[derive(Debug)]
511pub struct Diff {
512 pub base_version: clock::Global,
513 pub line_ending: LineEnding,
514 pub edits: Vec<(Range<usize>, Arc<str>)>,
515}
516
517#[derive(Debug, Clone, Copy)]
518pub(crate) struct DiagnosticEndpoint {
519 offset: usize,
520 is_start: bool,
521 underline: bool,
522 severity: DiagnosticSeverity,
523 is_unnecessary: bool,
524}
525
526/// A class of characters, used for characterizing a run of text.
527#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
528pub enum CharKind {
529 /// Whitespace.
530 Whitespace,
531 /// Punctuation.
532 Punctuation,
533 /// Word.
534 Word,
535}
536
/// A runnable is a set of data about a region that can be resolved into a task.
538pub struct Runnable {
539 pub tags: SmallVec<[RunnableTag; 1]>,
540 pub language: Arc<Language>,
541 pub buffer: BufferId,
542}
543
544#[derive(Default, Clone, Debug)]
545pub struct HighlightedText {
546 pub text: SharedString,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550#[derive(Default, Debug)]
551struct HighlightedTextBuilder {
552 pub text: String,
553 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
554}
555
556impl HighlightedText {
557 pub fn from_buffer_range<T: ToOffset>(
558 range: Range<T>,
559 snapshot: &text::BufferSnapshot,
560 syntax_snapshot: &SyntaxSnapshot,
561 override_style: Option<HighlightStyle>,
562 syntax_theme: &SyntaxTheme,
563 ) -> Self {
564 let mut highlighted_text = HighlightedTextBuilder::default();
565 highlighted_text.add_text_from_buffer_range(
566 range,
567 snapshot,
568 syntax_snapshot,
569 override_style,
570 syntax_theme,
571 );
572 highlighted_text.build()
573 }
574
575 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
576 gpui::StyledText::new(self.text.clone())
577 .with_default_highlights(default_style, self.highlights.iter().cloned())
578 }
579
    /// Returns the first line, with its leading whitespace trimmed unless a
    /// highlight starts within that whitespace, along with a boolean
    /// indicating whether more lines follow.
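    ///
    /// A minimal sketch of the expected behavior (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```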
582 pub fn first_line_preview(self) -> (Self, bool) {
583 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
584 let first_line = &self.text[..newline_ix];
585
586 // Trim leading whitespace, unless an edit starts prior to it.
587 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
588 if let Some((first_highlight_range, _)) = self.highlights.first() {
589 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
590 }
591
592 let preview_text = &first_line[preview_start_ix..];
593 let preview_highlights = self
594 .highlights
595 .into_iter()
596 .take_while(|(range, _)| range.start < newline_ix)
597 .filter_map(|(mut range, highlight)| {
598 range.start = range.start.saturating_sub(preview_start_ix);
599 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
600 if range.is_empty() {
601 None
602 } else {
603 Some((range, highlight))
604 }
605 });
606
607 let preview = Self {
608 text: SharedString::new(preview_text),
609 highlights: preview_highlights.collect(),
610 };
611
612 (preview, self.text.len() > newline_ix)
613 }
614}
615
616impl HighlightedTextBuilder {
617 pub fn build(self) -> HighlightedText {
618 HighlightedText {
619 text: self.text.into(),
620 highlights: self.highlights,
621 }
622 }
623
624 pub fn add_text_from_buffer_range<T: ToOffset>(
625 &mut self,
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) {
632 let range = range.to_offset(snapshot);
633 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
634 let start = self.text.len();
635 self.text.push_str(chunk.text);
636 let end = self.text.len();
637
638 if let Some(highlight_style) = chunk
639 .syntax_highlight_id
640 .and_then(|id| id.style(syntax_theme))
641 {
642 let highlight_style = override_style.map_or(highlight_style, |override_style| {
643 highlight_style.highlight(override_style)
644 });
645 self.highlights.push((start..end, highlight_style));
646 } else if let Some(override_style) = override_style {
647 self.highlights.push((start..end, override_style));
648 }
649 }
650 }
651
652 fn highlighted_chunks<'a>(
653 range: Range<usize>,
654 snapshot: &'a text::BufferSnapshot,
655 syntax_snapshot: &'a SyntaxSnapshot,
656 ) -> BufferChunks<'a> {
657 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
658 grammar.highlights_query.as_ref()
659 });
660
661 let highlight_maps = captures
662 .grammars()
663 .iter()
664 .map(|grammar| grammar.highlight_map())
665 .collect();
666
667 BufferChunks::new(
668 snapshot.as_rope(),
669 range,
670 Some((captures, highlight_maps)),
671 false,
672 None,
673 )
674 }
675}
676
677#[derive(Clone)]
678pub struct EditPreview {
679 old_snapshot: text::BufferSnapshot,
680 applied_edits_snapshot: text::BufferSnapshot,
681 syntax_snapshot: SyntaxSnapshot,
682}
683
684impl EditPreview {
685 pub fn highlight_edits(
686 &self,
687 current_snapshot: &BufferSnapshot,
688 edits: &[(Range<Anchor>, String)],
689 include_deletions: bool,
690 cx: &App,
691 ) -> HighlightedText {
692 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
693 return HighlightedText::default();
694 };
695
696 let mut highlighted_text = HighlightedTextBuilder::default();
697
698 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
699
700 let insertion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().created_background),
702 ..Default::default()
703 };
704 let deletion_highlight_style = HighlightStyle {
705 background_color: Some(cx.theme().status().deleted_background),
706 ..Default::default()
707 };
708 let syntax_theme = cx.theme().syntax();
709
710 for (range, edit_text) in edits {
711 let edit_new_end_in_preview_snapshot = range
712 .end
713 .bias_right(&self.old_snapshot)
714 .to_offset(&self.applied_edits_snapshot);
715 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
716
717 let unchanged_range_in_preview_snapshot =
718 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
719 if !unchanged_range_in_preview_snapshot.is_empty() {
720 highlighted_text.add_text_from_buffer_range(
721 unchanged_range_in_preview_snapshot,
722 &self.applied_edits_snapshot,
723 &self.syntax_snapshot,
724 None,
725 syntax_theme,
726 );
727 }
728
729 let range_in_current_snapshot = range.to_offset(current_snapshot);
730 if include_deletions && !range_in_current_snapshot.is_empty() {
731 highlighted_text.add_text_from_buffer_range(
732 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
735 Some(deletion_highlight_style),
736 syntax_theme,
737 );
738 }
739
740 if !edit_text.is_empty() {
741 highlighted_text.add_text_from_buffer_range(
742 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
743 &self.applied_edits_snapshot,
744 &self.syntax_snapshot,
745 Some(insertion_highlight_style),
746 syntax_theme,
747 );
748 }
749
750 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
751 }
752
753 highlighted_text.add_text_from_buffer_range(
754 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
755 &self.applied_edits_snapshot,
756 &self.syntax_snapshot,
757 None,
758 syntax_theme,
759 );
760
761 highlighted_text.build()
762 }
763
764 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
765 let (first, _) = edits.first()?;
766 let (last, _) = edits.last()?;
767
768 let start = first
769 .start
770 .bias_left(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772 let end = last
773 .end
774 .bias_right(&self.old_snapshot)
775 .to_point(&self.applied_edits_snapshot);
776
777 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
778 let range = Point::new(start.row, 0)
779 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
780
781 Some(range.to_offset(&self.applied_edits_snapshot))
782 }
783}
784
785#[derive(Clone, Debug, PartialEq, Eq)]
786pub struct BracketMatch {
787 pub open_range: Range<usize>,
788 pub close_range: Range<usize>,
789 pub newline_only: bool,
790}
791
792impl Buffer {
793 /// Create a new buffer with the given base text.
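    ///
    /// A typical construction site (illustrative sketch):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```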
794 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
795 Self::build(
796 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
797 None,
798 Capability::ReadWrite,
799 )
800 }
801
802 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
803 pub fn local_normalized(
804 base_text_normalized: Rope,
805 line_ending: LineEnding,
806 cx: &Context<Self>,
807 ) -> Self {
808 Self::build(
809 TextBuffer::new_normalized(
810 0,
811 cx.entity_id().as_non_zero_u64().into(),
812 line_ending,
813 base_text_normalized,
814 ),
815 None,
816 Capability::ReadWrite,
817 )
818 }
819
820 /// Create a new buffer that is a replica of a remote buffer.
821 pub fn remote(
822 remote_id: BufferId,
823 replica_id: ReplicaId,
824 capability: Capability,
825 base_text: impl Into<String>,
826 ) -> Self {
827 Self::build(
828 TextBuffer::new(replica_id, remote_id, base_text.into()),
829 None,
830 capability,
831 )
832 }
833
834 /// Create a new buffer that is a replica of a remote buffer, populating its
835 /// state from the given protobuf message.
836 pub fn from_proto(
837 replica_id: ReplicaId,
838 capability: Capability,
839 message: proto::BufferState,
840 file: Option<Arc<dyn File>>,
841 ) -> Result<Self> {
842 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
843 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
844 let mut this = Self::build(buffer, file, capability);
845 this.text.set_line_ending(proto::deserialize_line_ending(
846 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
847 ));
848 this.saved_version = proto::deserialize_version(&message.saved_version);
849 this.saved_mtime = message.saved_mtime.map(|time| time.into());
850 Ok(this)
851 }
852
853 /// Serialize the buffer's state to a protobuf message.
854 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
855 proto::BufferState {
856 id: self.remote_id().into(),
857 file: self.file.as_ref().map(|f| f.to_proto(cx)),
858 base_text: self.base_text().to_string(),
859 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
860 saved_version: proto::serialize_version(&self.saved_version),
861 saved_mtime: self.saved_mtime.map(|time| time.into()),
862 }
863 }
864
865 /// Serialize as protobufs all of the changes to the buffer since the given version.
866 pub fn serialize_ops(
867 &self,
868 since: Option<clock::Global>,
869 cx: &App,
870 ) -> Task<Vec<proto::Operation>> {
871 let mut operations = Vec::new();
872 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
873
874 operations.extend(self.remote_selections.iter().map(|(_, set)| {
875 proto::serialize_operation(&Operation::UpdateSelections {
876 selections: set.selections.clone(),
877 lamport_timestamp: set.lamport_timestamp,
878 line_mode: set.line_mode,
879 cursor_shape: set.cursor_shape,
880 })
881 }));
882
883 for (server_id, diagnostics) in &self.diagnostics {
884 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
885 lamport_timestamp: self.diagnostics_timestamp,
886 server_id: *server_id,
887 diagnostics: diagnostics.iter().cloned().collect(),
888 }));
889 }
890
891 for (server_id, completions) in &self.completion_triggers_per_language_server {
892 operations.push(proto::serialize_operation(
893 &Operation::UpdateCompletionTriggers {
894 triggers: completions.iter().cloned().collect(),
895 lamport_timestamp: self.completion_triggers_timestamp,
896 server_id: *server_id,
897 },
898 ));
899 }
900
901 let text_operations = self.text.operations().clone();
902 cx.background_spawn(async move {
903 let since = since.unwrap_or_default();
904 operations.extend(
905 text_operations
906 .iter()
907 .filter(|(_, op)| !since.observed(op.timestamp()))
908 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
909 );
910 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
911 operations
912 })
913 }
914
915 /// Assign a language to the buffer, returning the buffer.
916 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
917 self.set_language(Some(language), cx);
918 self
919 }
920
921 /// Returns the [`Capability`] of this buffer.
922 pub fn capability(&self) -> Capability {
923 self.capability
924 }
925
926 /// Whether this buffer can only be read.
927 pub fn read_only(&self) -> bool {
928 self.capability == Capability::ReadOnly
929 }
930
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
932 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
933 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
934 let snapshot = buffer.snapshot();
935 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
936 Self {
937 saved_mtime,
938 saved_version: buffer.version(),
939 preview_version: buffer.version(),
940 reload_task: None,
941 transaction_depth: 0,
942 was_dirty_before_starting_transaction: None,
943 has_unsaved_edits: Cell::new((buffer.version(), false)),
944 text: buffer,
945 branch_state: None,
946 file,
947 capability,
948 syntax_map,
949 reparse: None,
950 non_text_state_update_count: 0,
951 sync_parse_timeout: Duration::from_millis(1),
952 parse_status: watch::channel(ParseStatus::Idle),
953 autoindent_requests: Default::default(),
954 wait_for_autoindent_txs: Default::default(),
955 pending_autoindent: Default::default(),
956 language: None,
957 remote_selections: Default::default(),
958 diagnostics: Default::default(),
959 diagnostics_timestamp: Default::default(),
960 completion_triggers: Default::default(),
961 completion_triggers_per_language_server: Default::default(),
962 completion_triggers_timestamp: Default::default(),
963 deferred_ops: OperationQueue::new(),
964 has_conflict: false,
965 change_bits: Default::default(),
966 _subscriptions: Vec::new(),
967 }
968 }
969
970 pub fn build_snapshot(
971 text: Rope,
972 language: Option<Arc<Language>>,
973 language_registry: Option<Arc<LanguageRegistry>>,
974 cx: &mut App,
975 ) -> impl Future<Output = BufferSnapshot> + use<> {
976 let entity_id = cx.reserve_entity::<Self>().entity_id();
977 let buffer_id = entity_id.as_non_zero_u64().into();
978 async move {
979 let text =
980 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
981 let mut syntax = SyntaxMap::new(&text).snapshot();
982 if let Some(language) = language.clone() {
983 let language_registry = language_registry.clone();
984 syntax.reparse(&text, language_registry, language);
985 }
986 BufferSnapshot {
987 text,
988 syntax,
989 file: None,
990 diagnostics: Default::default(),
991 remote_selections: Default::default(),
992 language,
993 non_text_state_update_count: 0,
994 }
995 }
996 }
997
998 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
999 let entity_id = cx.reserve_entity::<Self>().entity_id();
1000 let buffer_id = entity_id.as_non_zero_u64().into();
1001 let text =
1002 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1003 let syntax = SyntaxMap::new(&text).snapshot();
1004 BufferSnapshot {
1005 text,
1006 syntax,
1007 file: None,
1008 diagnostics: Default::default(),
1009 remote_selections: Default::default(),
1010 language: None,
1011 non_text_state_update_count: 0,
1012 }
1013 }
1014
1015 #[cfg(any(test, feature = "test-support"))]
1016 pub fn build_snapshot_sync(
1017 text: Rope,
1018 language: Option<Arc<Language>>,
1019 language_registry: Option<Arc<LanguageRegistry>>,
1020 cx: &mut App,
1021 ) -> BufferSnapshot {
1022 let entity_id = cx.reserve_entity::<Self>().entity_id();
1023 let buffer_id = entity_id.as_non_zero_u64().into();
1024 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1025 let mut syntax = SyntaxMap::new(&text).snapshot();
1026 if let Some(language) = language.clone() {
1027 syntax.reparse(&text, language_registry, language);
1028 }
1029 BufferSnapshot {
1030 text,
1031 syntax,
1032 file: None,
1033 diagnostics: Default::default(),
1034 remote_selections: Default::default(),
1035 language,
1036 non_text_state_update_count: 0,
1037 }
1038 }
1039
1040 /// Retrieve a snapshot of the buffer's current state. This is computationally
1041 /// cheap, and allows reading from the buffer on a background thread.
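    ///
    /// For example (illustrative), a snapshot can be moved onto a background
    /// thread while the buffer itself stays on the main thread:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it is safe to read here.
    ///     let _word_count = snapshot.text().split_whitespace().count();
    /// })
    /// .detach();
    /// ```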
1042 pub fn snapshot(&self) -> BufferSnapshot {
1043 let text = self.text.snapshot();
1044 let mut syntax_map = self.syntax_map.lock();
1045 syntax_map.interpolate(&text);
1046 let syntax = syntax_map.snapshot();
1047
1048 BufferSnapshot {
1049 text,
1050 syntax,
1051 file: self.file.clone(),
1052 remote_selections: self.remote_selections.clone(),
1053 diagnostics: self.diagnostics.clone(),
1054 language: self.language.clone(),
1055 non_text_state_update_count: self.non_text_state_update_count,
1056 }
1057 }
1058
1059 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1060 let this = cx.entity();
1061 cx.new(|cx| {
1062 let mut branch = Self {
1063 branch_state: Some(BufferBranchState {
1064 base_buffer: this.clone(),
1065 merged_operations: Default::default(),
1066 }),
1067 language: self.language.clone(),
1068 has_conflict: self.has_conflict,
1069 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1070 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1071 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1072 };
1073 if let Some(language_registry) = self.language_registry() {
1074 branch.set_language_registry(language_registry);
1075 }
1076
1077 // Reparse the branch buffer so that we get syntax highlighting immediately.
1078 branch.reparse(cx);
1079
1080 branch
1081 })
1082 }
1083
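    /// Computes an [`EditPreview`] for a set of proposed edits, without
    /// applying them to this buffer.
    ///
    /// A rough usage sketch (illustrative; assumes `edits` and a current
    /// `snapshot` are already in hand):
    ///
    /// ```ignore
    /// let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
    /// cx.spawn(async move |cx| {
    ///     let preview = preview_task.await;
    ///     let highlighted = cx.update(|cx| {
    ///         preview.highlight_edits(&snapshot, &edits, true, cx)
    ///     })?;
    ///     // `highlighted` can now be turned into a `StyledText` for rendering.
    ///     anyhow::Ok(())
    /// })
    /// .detach();
    /// ```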
1084 pub fn preview_edits(
1085 &self,
1086 edits: Arc<[(Range<Anchor>, String)]>,
1087 cx: &App,
1088 ) -> Task<EditPreview> {
1089 let registry = self.language_registry();
1090 let language = self.language().cloned();
1091 let old_snapshot = self.text.snapshot();
1092 let mut branch_buffer = self.text.branch();
1093 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1094 cx.background_spawn(async move {
1095 if !edits.is_empty() {
1096 if let Some(language) = language.clone() {
1097 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1098 }
1099
1100 branch_buffer.edit(edits.iter().cloned());
1101 let snapshot = branch_buffer.snapshot();
1102 syntax_snapshot.interpolate(&snapshot);
1103
1104 if let Some(language) = language {
1105 syntax_snapshot.reparse(&snapshot, registry, language);
1106 }
1107 }
1108 EditPreview {
1109 old_snapshot,
1110 applied_edits_snapshot: branch_buffer.snapshot(),
1111 syntax_snapshot,
1112 }
1113 })
1114 }
1115
1116 /// Applies all of the changes in this buffer that intersect any of the
1117 /// given `ranges` to its base buffer.
1118 ///
1119 /// If `ranges` is empty, then all changes will be applied. This buffer must
1120 /// be a branch buffer to call this method.
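    ///
    /// For example (illustrative), to merge only the changes overlapping the
    /// first ten bytes back into the base buffer:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(vec![0..10], cx));
    /// ```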
1121 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1122 let Some(base_buffer) = self.base_buffer() else {
1123 debug_panic!("not a branch buffer");
1124 return;
1125 };
1126
1127 let mut ranges = if ranges.is_empty() {
1128 &[0..usize::MAX]
1129 } else {
1130 ranges.as_slice()
1131 }
1132 .iter()
1133 .peekable();
1134
1135 let mut edits = Vec::new();
1136 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1137 let mut is_included = false;
1138 while let Some(range) = ranges.peek() {
1139 if range.end < edit.new.start {
1140 ranges.next().unwrap();
1141 } else {
1142 if range.start <= edit.new.end {
1143 is_included = true;
1144 }
1145 break;
1146 }
1147 }
1148
1149 if is_included {
1150 edits.push((
1151 edit.old.clone(),
1152 self.text_for_range(edit.new.clone()).collect::<String>(),
1153 ));
1154 }
1155 }
1156
1157 let operation = base_buffer.update(cx, |base_buffer, cx| {
1158 // cx.emit(BufferEvent::DiffBaseChanged);
1159 base_buffer.edit(edits, None, cx)
1160 });
1161
1162 if let Some(operation) = operation
1163 && let Some(BufferBranchState {
1164 merged_operations, ..
1165 }) = &mut self.branch_state
1166 {
1167 merged_operations.push(operation);
1168 }
1169 }
1170
1171 fn on_base_buffer_event(
1172 &mut self,
1173 _: Entity<Buffer>,
1174 event: &BufferEvent,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let BufferEvent::Operation { operation, .. } = event else {
1178 return;
1179 };
1180 let Some(BufferBranchState {
1181 merged_operations, ..
1182 }) = &mut self.branch_state
1183 else {
1184 return;
1185 };
1186
1187 let mut operation_to_undo = None;
1188 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1189 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1190 {
1191 merged_operations.remove(ix);
1192 operation_to_undo = Some(operation.timestamp);
1193 }
1194
1195 self.apply_ops([operation.clone()], cx);
1196
1197 if let Some(timestamp) = operation_to_undo {
1198 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1199 self.undo_operations(counts, cx);
1200 }
1201 }
1202
1203 #[cfg(test)]
1204 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1205 &self.text
1206 }
1207
1208 /// Retrieve a snapshot of the buffer's raw text, without any
1209 /// language-related state like the syntax tree or diagnostics.
1210 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1211 self.text.snapshot()
1212 }
1213
1214 /// The file associated with the buffer, if any.
1215 pub fn file(&self) -> Option<&Arc<dyn File>> {
1216 self.file.as_ref()
1217 }
1218
1219 /// The version of the buffer that was last saved or reloaded from disk.
1220 pub fn saved_version(&self) -> &clock::Global {
1221 &self.saved_version
1222 }
1223
1224 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1225 pub fn saved_mtime(&self) -> Option<MTime> {
1226 self.saved_mtime
1227 }
1228
1229 /// Assign a language to the buffer.
1230 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1231 self.non_text_state_update_count += 1;
1232 self.syntax_map.lock().clear(&self.text);
1233 self.language = language;
1234 self.was_changed();
1235 self.reparse(cx);
1236 cx.emit(BufferEvent::LanguageChanged);
1237 }
1238
1239 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1240 /// other languages if parts of the buffer are written in different languages.
1241 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1242 self.syntax_map
1243 .lock()
1244 .set_language_registry(language_registry);
1245 }
1246
1247 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1248 self.syntax_map.lock().language_registry()
1249 }
1250
1251 /// Assign the line ending type to the buffer.
1252 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1253 self.text.set_line_ending(line_ending);
1254
1255 let lamport_timestamp = self.text.lamport_clock.tick();
1256 self.send_operation(
1257 Operation::UpdateLineEnding {
1258 line_ending,
1259 lamport_timestamp,
1260 },
1261 true,
1262 cx,
1263 );
1264 }
1265
1266 /// Assign the buffer a new [`Capability`].
1267 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1268 if self.capability != capability {
1269 self.capability = capability;
1270 cx.emit(BufferEvent::CapabilityChanged)
1271 }
1272 }
1273
1274 /// This method is called to signal that the buffer has been saved.
1275 pub fn did_save(
1276 &mut self,
1277 version: clock::Global,
1278 mtime: Option<MTime>,
1279 cx: &mut Context<Self>,
1280 ) {
1281 self.saved_version = version;
1282 self.has_unsaved_edits
1283 .set((self.saved_version().clone(), false));
1284 self.has_conflict = false;
1285 self.saved_mtime = mtime;
1286 self.was_changed();
1287 cx.emit(BufferEvent::Saved);
1288 cx.notify();
1289 }
1290
1291 /// Reloads the contents of the buffer from disk.
1292 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1293 let (tx, rx) = futures::channel::oneshot::channel();
1294 let prev_version = self.text.version();
1295 self.reload_task = Some(cx.spawn(async move |this, cx| {
1296 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1297 let file = this.file.as_ref()?.as_local()?;
1298
1299 Some((file.disk_state().mtime(), file.load(cx)))
1300 })?
1301 else {
1302 return Ok(());
1303 };
1304
1305 let new_text = new_text.await?;
1306 let diff = this
1307 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1308 .await;
1309 this.update(cx, |this, cx| {
1310 if this.version() == diff.base_version {
1311 this.finalize_last_transaction();
1312 this.apply_diff(diff, cx);
1313 tx.send(this.finalize_last_transaction().cloned()).ok();
1314 this.has_conflict = false;
1315 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1316 } else {
1317 if !diff.edits.is_empty()
1318 || this
1319 .edits_since::<usize>(&diff.base_version)
1320 .next()
1321 .is_some()
1322 {
1323 this.has_conflict = true;
1324 }
1325
1326 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1327 }
1328
1329 this.reload_task.take();
1330 })
1331 }));
1332 rx
1333 }
1334
1335 /// This method is called to signal that the buffer has been reloaded.
1336 pub fn did_reload(
1337 &mut self,
1338 version: clock::Global,
1339 line_ending: LineEnding,
1340 mtime: Option<MTime>,
1341 cx: &mut Context<Self>,
1342 ) {
1343 self.saved_version = version;
1344 self.has_unsaved_edits
1345 .set((self.saved_version.clone(), false));
1346 self.text.set_line_ending(line_ending);
1347 self.saved_mtime = mtime;
1348 cx.emit(BufferEvent::Reloaded);
1349 cx.notify();
1350 }
1351
1352 /// Updates the [`File`] backing this buffer. This should be called when
1353 /// the file has changed or has been deleted.
1354 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1355 let was_dirty = self.is_dirty();
1356 let mut file_changed = false;
1357
1358 if let Some(old_file) = self.file.as_ref() {
1359 if new_file.path() != old_file.path() {
1360 file_changed = true;
1361 }
1362
1363 let old_state = old_file.disk_state();
1364 let new_state = new_file.disk_state();
1365 if old_state != new_state {
1366 file_changed = true;
1367 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1368 cx.emit(BufferEvent::ReloadNeeded)
1369 }
1370 }
1371 } else {
1372 file_changed = true;
1373 };
1374
1375 self.file = Some(new_file);
1376 if file_changed {
1377 self.was_changed();
1378 self.non_text_state_update_count += 1;
1379 if was_dirty != self.is_dirty() {
1380 cx.emit(BufferEvent::DirtyChanged);
1381 }
1382 cx.emit(BufferEvent::FileHandleChanged);
1383 cx.notify();
1384 }
1385 }
1386
1387 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1388 Some(self.branch_state.as_ref()?.base_buffer.clone())
1389 }
1390
1391 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1392 pub fn language(&self) -> Option<&Arc<Language>> {
1393 self.language.as_ref()
1394 }
1395
1396 /// Returns the [`Language`] at the given location.
1397 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1398 let offset = position.to_offset(self);
1399 let mut is_first = true;
1400 let start_anchor = self.anchor_before(offset);
1401 let end_anchor = self.anchor_after(offset);
1402 self.syntax_map
1403 .lock()
1404 .layers_for_range(offset..offset, &self.text, false)
1405 .filter(|layer| {
1406 if is_first {
1407 is_first = false;
1408 return true;
1409 }
1410
1411 layer
1412 .included_sub_ranges
1413 .map(|sub_ranges| {
1414 sub_ranges.iter().any(|sub_range| {
1415 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1416 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1417 !is_before_start && !is_after_end
1418 })
1419 })
1420 .unwrap_or(true)
1421 })
1422 .last()
1423 .map(|info| info.language.clone())
1424 .or_else(|| self.language.clone())
1425 }
1426
1427 /// Returns each [`Language`] for the active syntax layers at the given location.
1428 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1429 let offset = position.to_offset(self);
1430 let mut languages: Vec<Arc<Language>> = self
1431 .syntax_map
1432 .lock()
1433 .layers_for_range(offset..offset, &self.text, false)
1434 .map(|info| info.language.clone())
1435 .collect();
1436
1437 if languages.is_empty()
1438 && let Some(buffer_language) = self.language()
1439 {
1440 languages.push(buffer_language.clone());
1441 }
1442
1443 languages
1444 }
1445
1446 /// An integer version number that accounts for all updates besides
1447 /// the buffer's text itself (which is versioned via a version vector).
1448 pub fn non_text_state_update_count(&self) -> usize {
1449 self.non_text_state_update_count
1450 }
1451
1452 /// Whether the buffer is being parsed in the background.
1453 #[cfg(any(test, feature = "test-support"))]
1454 pub fn is_parsing(&self) -> bool {
1455 self.reparse.is_some()
1456 }
1457
1458 /// Indicates whether the buffer contains any regions that may be
1459 /// written in a language that hasn't been loaded yet.
1460 pub fn contains_unknown_injections(&self) -> bool {
1461 self.syntax_map.lock().contains_unknown_injections()
1462 }
1463
1464 #[cfg(any(test, feature = "test-support"))]
1465 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1466 self.sync_parse_timeout = timeout;
1467 }
1468
1469 /// Called after an edit to synchronize the buffer's main parse tree with
1470 /// the buffer's new underlying state.
1471 ///
1472 /// Locks the syntax map and interpolates the edits since the last reparse
1473 /// into the foreground syntax tree.
1474 ///
1475 /// Then takes a stable snapshot of the syntax map before unlocking it.
1476 /// The snapshot with the interpolated edits is sent to a background thread,
1477 /// where we ask Tree-sitter to perform an incremental parse.
1478 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the newly parsed tree.
    ///
    /// If we time out waiting on the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for
    /// the background parse to finish. When it completes, we call back into
    /// the main thread and assign the new parse state.
1487 ///
1488 /// If the buffer or grammar changed since the start of the background parse,
1489 /// initiate an additional reparse recursively. To avoid concurrent parses
1490 /// for the same buffer, we only initiate a new parse if we are not already
1491 /// parsing in the background.
1492 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1493 if self.reparse.is_some() {
1494 return;
1495 }
1496 let language = if let Some(language) = self.language.clone() {
1497 language
1498 } else {
1499 return;
1500 };
1501
1502 let text = self.text_snapshot();
1503 let parsed_version = self.version();
1504
1505 let mut syntax_map = self.syntax_map.lock();
1506 syntax_map.interpolate(&text);
1507 let language_registry = syntax_map.language_registry();
1508 let mut syntax_snapshot = syntax_map.snapshot();
1509 drop(syntax_map);
1510
1511 let parse_task = cx.background_spawn({
1512 let language = language.clone();
1513 let language_registry = language_registry.clone();
1514 async move {
1515 syntax_snapshot.reparse(&text, language_registry, language);
1516 syntax_snapshot
1517 }
1518 });
1519
1520 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1521 match cx
1522 .background_executor()
1523 .block_with_timeout(self.sync_parse_timeout, parse_task)
1524 {
1525 Ok(new_syntax_snapshot) => {
1526 self.did_finish_parsing(new_syntax_snapshot, cx);
1527 self.reparse = None;
1528 }
1529 Err(parse_task) => {
1530 self.reparse = Some(cx.spawn(async move |this, cx| {
1531 let new_syntax_map = parse_task.await;
1532 this.update(cx, move |this, cx| {
1533 let grammar_changed =
1534 this.language.as_ref().is_none_or(|current_language| {
1535 !Arc::ptr_eq(&language, current_language)
1536 });
1537 let language_registry_changed = new_syntax_map
1538 .contains_unknown_injections()
1539 && language_registry.is_some_and(|registry| {
1540 registry.version() != new_syntax_map.language_registry_version()
1541 });
1542 let parse_again = language_registry_changed
1543 || grammar_changed
1544 || this.version.changed_since(&parsed_version);
1545 this.did_finish_parsing(new_syntax_map, cx);
1546 this.reparse = None;
1547 if parse_again {
1548 this.reparse(cx);
1549 }
1550 })
1551 .ok();
1552 }));
1553 }
1554 }
1555 }
1556
1557 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1558 self.was_changed();
1559 self.non_text_state_update_count += 1;
1560 self.syntax_map.lock().did_parse(syntax_snapshot);
1561 self.request_autoindent(cx);
1562 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1563 cx.emit(BufferEvent::Reparsed);
1564 cx.notify();
1565 }
1566
1567 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1568 self.parse_status.1.clone()
1569 }
1570
1571 /// Assign to the buffer a set of diagnostics created by a given language server.
1572 pub fn update_diagnostics(
1573 &mut self,
1574 server_id: LanguageServerId,
1575 diagnostics: DiagnosticSet,
1576 cx: &mut Context<Self>,
1577 ) {
1578 let lamport_timestamp = self.text.lamport_clock.tick();
1579 let op = Operation::UpdateDiagnostics {
1580 server_id,
1581 diagnostics: diagnostics.iter().cloned().collect(),
1582 lamport_timestamp,
1583 };
1584
1585 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1586 self.send_operation(op, true, cx);
1587 }
1588
1589 pub fn buffer_diagnostics(
1590 &self,
1591 for_server: Option<LanguageServerId>,
1592 ) -> Vec<&DiagnosticEntry<Anchor>> {
1593 match for_server {
1594 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1595 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1596 Err(_) => Vec::new(),
1597 },
1598 None => self
1599 .diagnostics
1600 .iter()
1601 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1602 .collect(),
1603 }
1604 }
1605
1606 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1607 if let Some(indent_sizes) = self.compute_autoindents() {
1608 let indent_sizes = cx.background_spawn(indent_sizes);
1609 match cx
1610 .background_executor()
1611 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1612 {
1613 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1614 Err(indent_sizes) => {
1615 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1616 let indent_sizes = indent_sizes.await;
1617 this.update(cx, |this, cx| {
1618 this.apply_autoindents(indent_sizes, cx);
1619 })
1620 .ok();
1621 }));
1622 }
1623 }
1624 } else {
1625 self.autoindent_requests.clear();
1626 for tx in self.wait_for_autoindent_txs.drain(..) {
1627 tx.send(()).ok();
1628 }
1629 }
1630 }
1631
1632 fn compute_autoindents(
1633 &self,
1634 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1635 let max_rows_between_yields = 100;
1636 let snapshot = self.snapshot();
1637 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1638 return None;
1639 }
1640
1641 let autoindent_requests = self.autoindent_requests.clone();
1642 Some(async move {
1643 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1644 for request in autoindent_requests {
1645 // Resolve each edited range to its row in the current buffer and in the
1646 // buffer before this batch of edits.
1647 let mut row_ranges = Vec::new();
1648 let mut old_to_new_rows = BTreeMap::new();
1649 let mut language_indent_sizes_by_new_row = Vec::new();
1650 for entry in &request.entries {
1651 let position = entry.range.start;
1652 let new_row = position.to_point(&snapshot).row;
1653 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1654 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1655
1656 if !entry.first_line_is_new {
1657 let old_row = position.to_point(&request.before_edit).row;
1658 old_to_new_rows.insert(old_row, new_row);
1659 }
1660 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1661 }
1662
1663 // Build a map containing the suggested indentation for each of the edited lines
1664 // with respect to the state of the buffer before these edits. This map is keyed
1665 // by the rows for these lines in the current state of the buffer.
1666 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1667 let old_edited_ranges =
1668 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1669 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1670 let mut language_indent_size = IndentSize::default();
1671 for old_edited_range in old_edited_ranges {
1672 let suggestions = request
1673 .before_edit
1674 .suggest_autoindents(old_edited_range.clone())
1675 .into_iter()
1676 .flatten();
1677 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1678 if let Some(suggestion) = suggestion {
1679 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1680
1681 // Find the indent size based on the language for this row.
1682 while let Some((row, size)) = language_indent_sizes.peek() {
1683 if *row > new_row {
1684 break;
1685 }
1686 language_indent_size = *size;
1687 language_indent_sizes.next();
1688 }
1689
1690 let suggested_indent = old_to_new_rows
1691 .get(&suggestion.basis_row)
1692 .and_then(|from_row| {
1693 Some(old_suggestions.get(from_row).copied()?.0)
1694 })
1695 .unwrap_or_else(|| {
1696 request
1697 .before_edit
1698 .indent_size_for_line(suggestion.basis_row)
1699 })
1700 .with_delta(suggestion.delta, language_indent_size);
1701 old_suggestions
1702 .insert(new_row, (suggested_indent, suggestion.within_error));
1703 }
1704 }
1705 yield_now().await;
1706 }
1707
1708 // Compute new suggestions for each line, but only include them in the result
1709 // if they differ from the old suggestion for that line.
1710 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1711 let mut language_indent_size = IndentSize::default();
1712 for (row_range, original_indent_column) in row_ranges {
1713 let new_edited_row_range = if request.is_block_mode {
1714 row_range.start..row_range.start + 1
1715 } else {
1716 row_range.clone()
1717 };
1718
1719 let suggestions = snapshot
1720 .suggest_autoindents(new_edited_row_range.clone())
1721 .into_iter()
1722 .flatten();
1723 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1724 if let Some(suggestion) = suggestion {
1725 // Find the indent size based on the language for this row.
1726 while let Some((row, size)) = language_indent_sizes.peek() {
1727 if *row > new_row {
1728 break;
1729 }
1730 language_indent_size = *size;
1731 language_indent_sizes.next();
1732 }
1733
1734 let suggested_indent = indent_sizes
1735 .get(&suggestion.basis_row)
1736 .copied()
1737 .map(|e| e.0)
1738 .unwrap_or_else(|| {
1739 snapshot.indent_size_for_line(suggestion.basis_row)
1740 })
1741 .with_delta(suggestion.delta, language_indent_size);
1742
1743 if old_suggestions.get(&new_row).is_none_or(
1744 |(old_indentation, was_within_error)| {
1745 suggested_indent != *old_indentation
1746 && (!suggestion.within_error || *was_within_error)
1747 },
1748 ) {
1749 indent_sizes.insert(
1750 new_row,
1751 (suggested_indent, request.ignore_empty_lines),
1752 );
1753 }
1754 }
1755 }
1756
1757 if let (true, Some(original_indent_column)) =
1758 (request.is_block_mode, original_indent_column)
1759 {
1760 let new_indent =
1761 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1762 *indent
1763 } else {
1764 snapshot.indent_size_for_line(row_range.start)
1765 };
1766 let delta = new_indent.len as i64 - original_indent_column as i64;
1767 if delta != 0 {
1768 for row in row_range.skip(1) {
1769 indent_sizes.entry(row).or_insert_with(|| {
1770 let mut size = snapshot.indent_size_for_line(row);
1771 if size.kind == new_indent.kind {
1772 match delta.cmp(&0) {
1773 Ordering::Greater => size.len += delta as u32,
1774 Ordering::Less => {
1775 size.len = size.len.saturating_sub(-delta as u32)
1776 }
1777 Ordering::Equal => {}
1778 }
1779 }
1780 (size, request.ignore_empty_lines)
1781 });
1782 }
1783 }
1784 }
1785
1786 yield_now().await;
1787 }
1788 }
1789
1790 indent_sizes
1791 .into_iter()
1792 .filter_map(|(row, (indent, ignore_empty_lines))| {
1793 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1794 None
1795 } else {
1796 Some((row, indent))
1797 }
1798 })
1799 .collect()
1800 })
1801 }
1802
1803 fn apply_autoindents(
1804 &mut self,
1805 indent_sizes: BTreeMap<u32, IndentSize>,
1806 cx: &mut Context<Self>,
1807 ) {
1808 self.autoindent_requests.clear();
1809 for tx in self.wait_for_autoindent_txs.drain(..) {
1810 tx.send(()).ok();
1811 }
1812
1813 let edits: Vec<_> = indent_sizes
1814 .into_iter()
1815 .filter_map(|(row, indent_size)| {
1816 let current_size = indent_size_for_line(self, row);
1817 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1818 })
1819 .collect();
1820
1821 let preserve_preview = self.preserve_preview();
1822 self.edit(edits, None, cx);
1823 if preserve_preview {
1824 self.refresh_preview();
1825 }
1826 }
1827
1828 /// Create a minimal edit that will cause the given row to be indented
1829 /// with the given size. After applying this edit, the length of the line
1830 /// will always be at least `new_size.len`.
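    ///
    /// A minimal illustrative sketch (not compiled as a doc test): growing a
    /// two-space indent to a four-space indent on row 3 produces an insertion
    /// of two spaces at the start of that row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // `edit` is `Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))`.
    /// ```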
1831 pub fn edit_for_indent_size_adjustment(
1832 row: u32,
1833 current_size: IndentSize,
1834 new_size: IndentSize,
1835 ) -> Option<(Range<Point>, String)> {
1836 if new_size.kind == current_size.kind {
1837            match new_size.len.cmp(&current_size.len) {
1838 Ordering::Greater => {
1839 let point = Point::new(row, 0);
1840 Some((
1841 point..point,
1842 iter::repeat(new_size.char())
1843 .take((new_size.len - current_size.len) as usize)
1844 .collect::<String>(),
1845 ))
1846 }
1847
1848 Ordering::Less => Some((
1849 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1850 String::new(),
1851 )),
1852
1853 Ordering::Equal => None,
1854 }
1855 } else {
1856 Some((
1857 Point::new(row, 0)..Point::new(row, current_size.len),
1858 iter::repeat(new_size.char())
1859 .take(new_size.len as usize)
1860 .collect::<String>(),
1861 ))
1862 }
1863 }
1864
1865 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1866 /// and the given new text.
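    ///
    /// The resulting [`Diff`] is typically applied back to the buffer with
    /// [`Buffer::apply_diff`]. A rough sketch, assuming `buffer` is an
    /// `Entity<Buffer>` and this runs in an async context:
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```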
1867 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1868 let old_text = self.as_rope().clone();
1869 let base_version = self.version();
1870 cx.background_executor()
1871 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1872 let old_text = old_text.to_string();
1873 let line_ending = LineEnding::detect(&new_text);
1874 LineEnding::normalize(&mut new_text);
1875 let edits = text_diff(&old_text, &new_text);
1876 Diff {
1877 base_version,
1878 line_ending,
1879 edits,
1880 }
1881 })
1882 }
1883
1884 /// Spawns a background task that searches the buffer for any whitespace
1885    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1886 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1887 let old_text = self.as_rope().clone();
1888 let line_ending = self.line_ending();
1889 let base_version = self.version();
1890 cx.background_spawn(async move {
1891 let ranges = trailing_whitespace_ranges(&old_text);
1892 let empty = Arc::<str>::from("");
1893 Diff {
1894 base_version,
1895 line_ending,
1896 edits: ranges
1897 .into_iter()
1898 .map(|range| (range, empty.clone()))
1899 .collect(),
1900 }
1901 })
1902 }
1903
1904 /// Ensures that the buffer ends with a single newline character, and
1905    /// no other whitespace. Has no effect if the buffer is empty.
1906 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1907 let len = self.len();
1908 if len == 0 {
1909 return;
1910 }
1911 let mut offset = len;
1912 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1913 let non_whitespace_len = chunk
1914 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1915 .len();
1916 offset -= chunk.len();
1917 offset += non_whitespace_len;
1918 if non_whitespace_len != 0 {
1919 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1920 return;
1921 }
1922 break;
1923 }
1924 }
1925 self.edit([(offset..len, "\n")], None, cx);
1926 }
1927
1928 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1929    /// calculated, the diff is adjusted to account for those changes, and any
1930    /// parts of the diff that conflict with those changes are discarded.
1931 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1932 let snapshot = self.snapshot();
1933 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1934 let mut delta = 0;
1935 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1936 while let Some(edit_since) = edits_since.peek() {
1937 // If the edit occurs after a diff hunk, then it does not
1938 // affect that hunk.
1939 if edit_since.old.start > range.end {
1940 break;
1941 }
1942 // If the edit precedes the diff hunk, then adjust the hunk
1943 // to reflect the edit.
1944 else if edit_since.old.end < range.start {
1945 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1946 edits_since.next();
1947 }
1948 // If the edit intersects a diff hunk, then discard that hunk.
1949 else {
1950 return None;
1951 }
1952 }
1953
1954 let start = (range.start as i64 + delta) as usize;
1955 let end = (range.end as i64 + delta) as usize;
1956 Some((start..end, new_text))
1957 });
1958
1959 self.start_transaction();
1960 self.text.set_line_ending(diff.line_ending);
1961 self.edit(adjusted_edits, None, cx);
1962 self.end_transaction(cx)
1963 }
1964
1965 fn has_unsaved_edits(&self) -> bool {
1966 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1967
1968 if last_version == self.version {
1969 self.has_unsaved_edits
1970 .set((last_version, has_unsaved_edits));
1971 return has_unsaved_edits;
1972 }
1973
1974 let has_edits = self.has_edits_since(&self.saved_version);
1975 self.has_unsaved_edits
1976 .set((self.version.clone(), has_edits));
1977 has_edits
1978 }
1979
1980 /// Checks if the buffer has unsaved changes.
1981 pub fn is_dirty(&self) -> bool {
1982 if self.capability == Capability::ReadOnly {
1983 return false;
1984 }
1985 if self.has_conflict {
1986 return true;
1987 }
1988 match self.file.as_ref().map(|f| f.disk_state()) {
1989 Some(DiskState::New) | Some(DiskState::Deleted) => {
1990 !self.is_empty() && self.has_unsaved_edits()
1991 }
1992 _ => self.has_unsaved_edits(),
1993 }
1994 }
1995
1996 /// Checks if the buffer and its file have both changed since the buffer
1997 /// was last saved or reloaded.
1998 pub fn has_conflict(&self) -> bool {
1999 if self.has_conflict {
2000 return true;
2001 }
2002 let Some(file) = self.file.as_ref() else {
2003 return false;
2004 };
2005 match file.disk_state() {
2006 DiskState::New => false,
2007 DiskState::Present { mtime } => match self.saved_mtime {
2008 Some(saved_mtime) => {
2009 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2010 }
2011 None => true,
2012 },
2013 DiskState::Deleted => false,
2014 }
2015 }
2016
2017 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2018 pub fn subscribe(&mut self) -> Subscription {
2019 self.text.subscribe()
2020 }
2021
2022 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2023 ///
2024 /// This allows downstream code to check if the buffer's text has changed without
2025    /// waiting for an effect cycle, which would be required if using events.
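    ///
    /// A minimal sketch of the intended usage (names are illustrative):
    ///
    /// ```ignore
    /// let change_bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&change_bit));
    /// // ...after some edits may have happened...
    /// if change_bit.replace(false) {
    ///     // The buffer's text changed since the bit was last cleared.
    /// }
    /// ```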
2026 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2027 if let Err(ix) = self
2028 .change_bits
2029 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2030 {
2031 self.change_bits.insert(ix, bit);
2032 }
2033 }
2034
2035 fn was_changed(&mut self) {
2036 self.change_bits.retain(|change_bit| {
2037 change_bit.upgrade().is_some_and(|bit| {
2038 bit.replace(true);
2039 true
2040 })
2041 });
2042 }
2043
2044 /// Starts a transaction, if one is not already in-progress. When undoing or
2045 /// redoing edits, all of the edits performed within a transaction are undone
2046 /// or redone together.
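    ///
    /// A sketch of grouping two edits into a single undo step (illustrative only):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single call to `undo` now reverts both edits together.
    /// ```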
2047 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2048 self.start_transaction_at(Instant::now())
2049 }
2050
2051 /// Starts a transaction, providing the current time. Subsequent transactions
2052 /// that occur within a short period of time will be grouped together. This
2053 /// is controlled by the buffer's undo grouping duration.
2054 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2055 self.transaction_depth += 1;
2056 if self.was_dirty_before_starting_transaction.is_none() {
2057 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2058 }
2059 self.text.start_transaction_at(now)
2060 }
2061
2062 /// Terminates the current transaction, if this is the outermost transaction.
2063 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2064 self.end_transaction_at(Instant::now(), cx)
2065 }
2066
2067 /// Terminates the current transaction, providing the current time. Subsequent transactions
2068 /// that occur within a short period of time will be grouped together. This
2069 /// is controlled by the buffer's undo grouping duration.
2070 pub fn end_transaction_at(
2071 &mut self,
2072 now: Instant,
2073 cx: &mut Context<Self>,
2074 ) -> Option<TransactionId> {
2075 assert!(self.transaction_depth > 0);
2076 self.transaction_depth -= 1;
2077 let was_dirty = if self.transaction_depth == 0 {
2078 self.was_dirty_before_starting_transaction.take().unwrap()
2079 } else {
2080 false
2081 };
2082 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2083 self.did_edit(&start_version, was_dirty, cx);
2084 Some(transaction_id)
2085 } else {
2086 None
2087 }
2088 }
2089
2090 /// Manually add a transaction to the buffer's undo history.
2091 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2092 self.text.push_transaction(transaction, now);
2093 }
2094
2095 /// Differs from `push_transaction` in that it does not clear the redo
2096 /// stack. Intended to be used to create a parent transaction to merge
2097 /// potential child transactions into.
2098 ///
2099 /// The caller is responsible for removing it from the undo history using
2100 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2101 /// are merged into this transaction, the caller is responsible for ensuring
2102 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2103 /// cleared is to create transactions with the usual `start_transaction` and
2104    /// `end_transaction` methods and merge the resulting transactions into
2105    /// the transaction created by this method.
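    ///
    /// A sketch of the intended pattern, where `child` is a hypothetical
    /// `Option<TransactionId>` returned by an earlier `end_transaction` call:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ...perform edits inside start_transaction/end_transaction pairs...
    /// match child {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```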
2106 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2107 self.text.push_empty_transaction(now)
2108 }
2109
2110 /// Prevent the last transaction from being grouped with any subsequent transactions,
2111    /// even if they occur within the buffer's undo grouping duration.
2112 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2113 self.text.finalize_last_transaction()
2114 }
2115
2116 /// Manually group all changes since a given transaction.
2117 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2118 self.text.group_until_transaction(transaction_id);
2119 }
2120
2121    /// Manually remove a transaction from the buffer's undo history.
2122 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2123 self.text.forget_transaction(transaction_id)
2124 }
2125
2126    /// Retrieve a transaction from the buffer's undo history.
2127 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2128 self.text.get_transaction(transaction_id)
2129 }
2130
2131 /// Manually merge two transactions in the buffer's undo history.
2132 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2133 self.text.merge_transactions(transaction, destination);
2134 }
2135
2136 /// Waits for the buffer to receive operations with the given timestamps.
2137 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2138 &mut self,
2139 edit_ids: It,
2140 ) -> impl Future<Output = Result<()>> + use<It> {
2141 self.text.wait_for_edits(edit_ids)
2142 }
2143
2144 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2145 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2146 &mut self,
2147 anchors: It,
2148 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2149 self.text.wait_for_anchors(anchors)
2150 }
2151
2152 /// Waits for the buffer to receive operations up to the given version.
2153 pub fn wait_for_version(
2154 &mut self,
2155 version: clock::Global,
2156 ) -> impl Future<Output = Result<()>> + use<> {
2157 self.text.wait_for_version(version)
2158 }
2159
2160    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2161    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2162 pub fn give_up_waiting(&mut self) {
2163 self.text.give_up_waiting();
2164 }
2165
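    /// Returns a receiver that resolves once the currently pending autoindent
    /// requests have been applied (or discarded), or `None` if there is no
    /// pending autoindent work.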
2166 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2167 let mut rx = None;
2168 if !self.autoindent_requests.is_empty() {
2169 let channel = oneshot::channel();
2170 self.wait_for_autoindent_txs.push(channel.0);
2171 rx = Some(channel.1);
2172 }
2173 rx
2174 }
2175
2176    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2177 pub fn set_active_selections(
2178 &mut self,
2179 selections: Arc<[Selection<Anchor>]>,
2180 line_mode: bool,
2181 cursor_shape: CursorShape,
2182 cx: &mut Context<Self>,
2183 ) {
2184 let lamport_timestamp = self.text.lamport_clock.tick();
2185 self.remote_selections.insert(
2186 self.text.replica_id(),
2187 SelectionSet {
2188 selections: selections.clone(),
2189 lamport_timestamp,
2190 line_mode,
2191 cursor_shape,
2192 },
2193 );
2194 self.send_operation(
2195 Operation::UpdateSelections {
2196 selections,
2197 line_mode,
2198 lamport_timestamp,
2199 cursor_shape,
2200 },
2201 true,
2202 cx,
2203 );
2204 self.non_text_state_update_count += 1;
2205 cx.notify();
2206 }
2207
2208 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2209 /// this replica.
2210 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2211 if self
2212 .remote_selections
2213 .get(&self.text.replica_id())
2214 .is_none_or(|set| !set.selections.is_empty())
2215 {
2216 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2217 }
2218 }
2219
2220 pub fn set_agent_selections(
2221 &mut self,
2222 selections: Arc<[Selection<Anchor>]>,
2223 line_mode: bool,
2224 cursor_shape: CursorShape,
2225 cx: &mut Context<Self>,
2226 ) {
2227 let lamport_timestamp = self.text.lamport_clock.tick();
2228 self.remote_selections.insert(
2229 AGENT_REPLICA_ID,
2230 SelectionSet {
2231 selections,
2232 lamport_timestamp,
2233 line_mode,
2234 cursor_shape,
2235 },
2236 );
2237 self.non_text_state_update_count += 1;
2238 cx.notify();
2239 }
2240
2241 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2242 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2243 }
2244
2245 /// Replaces the buffer's entire text.
2246 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2247 where
2248 T: Into<Arc<str>>,
2249 {
2250 self.autoindent_requests.clear();
2251 self.edit([(0..self.len(), text)], None, cx)
2252 }
2253
2254 /// Appends the given text to the end of the buffer.
2255 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2256 where
2257 T: Into<Arc<str>>,
2258 {
2259 self.edit([(self.len()..self.len(), text)], None, cx)
2260 }
2261
2262 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2263 /// delete, and a string of text to insert at that location.
2264 ///
2265 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2266 /// request for the edited ranges, which will be processed when the buffer finishes
2267 /// parsing.
2268 ///
2269    /// Parsing takes place at the end of a transaction, and may be performed
2270    /// synchronously or asynchronously, depending on the changes.
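    ///
    /// A couple of illustrative calls (not compiled as doc tests):
    ///
    /// ```ignore
    /// // Replace the first five bytes with "hello", without auto-indenting.
    /// buffer.edit([(0..5, "hello")], None, cx);
    ///
    /// // Insert a new line at the start of row 1 and let the language's
    /// // indentation rules indent it.
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 0), "let x = 1;\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```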
2271 pub fn edit<I, S, T>(
2272 &mut self,
2273 edits_iter: I,
2274 autoindent_mode: Option<AutoindentMode>,
2275 cx: &mut Context<Self>,
2276 ) -> Option<clock::Lamport>
2277 where
2278 I: IntoIterator<Item = (Range<S>, T)>,
2279 S: ToOffset,
2280 T: Into<Arc<str>>,
2281 {
2282 // Skip invalid edits and coalesce contiguous ones.
2283 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2284
2285 for (range, new_text) in edits_iter {
2286 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2287
2288 if range.start > range.end {
2289 mem::swap(&mut range.start, &mut range.end);
2290 }
2291 let new_text = new_text.into();
2292 if !new_text.is_empty() || !range.is_empty() {
2293 if let Some((prev_range, prev_text)) = edits.last_mut()
2294 && prev_range.end >= range.start
2295 {
2296 prev_range.end = cmp::max(prev_range.end, range.end);
2297 *prev_text = format!("{prev_text}{new_text}").into();
2298 } else {
2299 edits.push((range, new_text));
2300 }
2301 }
2302 }
2303 if edits.is_empty() {
2304 return None;
2305 }
2306
2307 self.start_transaction();
2308 self.pending_autoindent.take();
2309 let autoindent_request = autoindent_mode
2310 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2311
2312 let edit_operation = self.text.edit(edits.iter().cloned());
2313 let edit_id = edit_operation.timestamp();
2314
2315 if let Some((before_edit, mode)) = autoindent_request {
2316 let mut delta = 0isize;
2317 let mut previous_setting = None;
2318 let entries: Vec<_> = edits
2319 .into_iter()
2320 .enumerate()
2321 .zip(&edit_operation.as_edit().unwrap().new_text)
2322 .filter(|((_, (range, _)), _)| {
2323 let language = before_edit.language_at(range.start);
2324 let language_id = language.map(|l| l.id());
2325 if let Some((cached_language_id, auto_indent)) = previous_setting
2326 && cached_language_id == language_id
2327 {
2328 auto_indent
2329 } else {
2330 // The auto-indent setting is not present in editorconfigs, hence
2331 // we can avoid passing the file here.
2332 let auto_indent =
2333 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2334 previous_setting = Some((language_id, auto_indent));
2335 auto_indent
2336 }
2337 })
2338 .map(|((ix, (range, _)), new_text)| {
2339 let new_text_length = new_text.len();
2340 let old_start = range.start.to_point(&before_edit);
2341 let new_start = (delta + range.start as isize) as usize;
2342 let range_len = range.end - range.start;
2343 delta += new_text_length as isize - range_len as isize;
2344
2345 // Decide what range of the insertion to auto-indent, and whether
2346 // the first line of the insertion should be considered a newly-inserted line
2347 // or an edit to an existing line.
2348 let mut range_of_insertion_to_indent = 0..new_text_length;
2349 let mut first_line_is_new = true;
2350
2351 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2352 let old_line_end = before_edit.line_len(old_start.row);
2353
2354 if old_start.column > old_line_start {
2355 first_line_is_new = false;
2356 }
2357
2358 if !new_text.contains('\n')
2359 && (old_start.column + (range_len as u32) < old_line_end
2360 || old_line_end == old_line_start)
2361 {
2362 first_line_is_new = false;
2363 }
2364
2365 // When inserting text starting with a newline, avoid auto-indenting the
2366 // previous line.
2367 if new_text.starts_with('\n') {
2368 range_of_insertion_to_indent.start += 1;
2369 first_line_is_new = true;
2370 }
2371
2372 let mut original_indent_column = None;
2373 if let AutoindentMode::Block {
2374 original_indent_columns,
2375 } = &mode
2376 {
2377 original_indent_column = Some(if new_text.starts_with('\n') {
2378 indent_size_for_text(
2379 new_text[range_of_insertion_to_indent.clone()].chars(),
2380 )
2381 .len
2382 } else {
2383 original_indent_columns
2384 .get(ix)
2385 .copied()
2386 .flatten()
2387 .unwrap_or_else(|| {
2388 indent_size_for_text(
2389 new_text[range_of_insertion_to_indent.clone()].chars(),
2390 )
2391 .len
2392 })
2393 });
2394
2395 // Avoid auto-indenting the line after the edit.
2396 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2397 range_of_insertion_to_indent.end -= 1;
2398 }
2399 }
2400
2401 AutoindentRequestEntry {
2402 first_line_is_new,
2403 original_indent_column,
2404 indent_size: before_edit.language_indent_size_at(range.start, cx),
2405 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2406 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2407 }
2408 })
2409 .collect();
2410
2411 if !entries.is_empty() {
2412 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2413 before_edit,
2414 entries,
2415 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2416 ignore_empty_lines: false,
2417 }));
2418 }
2419 }
2420
2421 self.end_transaction(cx);
2422 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2423 Some(edit_id)
2424 }
2425
2426 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2427 self.was_changed();
2428
2429 if self.edits_since::<usize>(old_version).next().is_none() {
2430 return;
2431 }
2432
2433 self.reparse(cx);
2434 cx.emit(BufferEvent::Edited);
2435 if was_dirty != self.is_dirty() {
2436 cx.emit(BufferEvent::DirtyChanged);
2437 }
2438 cx.notify();
2439 }
2440
2441 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2442 where
2443 I: IntoIterator<Item = Range<T>>,
2444 T: ToOffset + Copy,
2445 {
2446 let before_edit = self.snapshot();
2447 let entries = ranges
2448 .into_iter()
2449 .map(|range| AutoindentRequestEntry {
2450 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2451 first_line_is_new: true,
2452 indent_size: before_edit.language_indent_size_at(range.start, cx),
2453 original_indent_column: None,
2454 })
2455 .collect();
2456 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2457 before_edit,
2458 entries,
2459 is_block_mode: false,
2460 ignore_empty_lines: true,
2461 }));
2462 self.request_autoindent(cx);
2463 }
2464
2465    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2466    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2467 pub fn insert_empty_line(
2468 &mut self,
2469 position: impl ToPoint,
2470 space_above: bool,
2471 space_below: bool,
2472 cx: &mut Context<Self>,
2473 ) -> Point {
2474 let mut position = position.to_point(self);
2475
2476 self.start_transaction();
2477
2478 self.edit(
2479 [(position..position, "\n")],
2480 Some(AutoindentMode::EachLine),
2481 cx,
2482 );
2483
2484 if position.column > 0 {
2485 position += Point::new(1, 0);
2486 }
2487
2488 if !self.is_line_blank(position.row) {
2489 self.edit(
2490 [(position..position, "\n")],
2491 Some(AutoindentMode::EachLine),
2492 cx,
2493 );
2494 }
2495
2496 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2497 self.edit(
2498 [(position..position, "\n")],
2499 Some(AutoindentMode::EachLine),
2500 cx,
2501 );
2502 position.row += 1;
2503 }
2504
2505 if space_below
2506 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2507 {
2508 self.edit(
2509 [(position..position, "\n")],
2510 Some(AutoindentMode::EachLine),
2511 cx,
2512 );
2513 }
2514
2515 self.end_transaction(cx);
2516
2517 position
2518 }
2519
2520 /// Applies the given remote operations to the buffer.
2521 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2522 self.pending_autoindent.take();
2523 let was_dirty = self.is_dirty();
2524 let old_version = self.version.clone();
2525 let mut deferred_ops = Vec::new();
2526 let buffer_ops = ops
2527 .into_iter()
2528 .filter_map(|op| match op {
2529 Operation::Buffer(op) => Some(op),
2530 _ => {
2531 if self.can_apply_op(&op) {
2532 self.apply_op(op, cx);
2533 } else {
2534 deferred_ops.push(op);
2535 }
2536 None
2537 }
2538 })
2539 .collect::<Vec<_>>();
2540 for operation in buffer_ops.iter() {
2541 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2542 }
2543 self.text.apply_ops(buffer_ops);
2544 self.deferred_ops.insert(deferred_ops);
2545 self.flush_deferred_ops(cx);
2546 self.did_edit(&old_version, was_dirty, cx);
2547 // Notify independently of whether the buffer was edited as the operations could include a
2548 // selection update.
2549 cx.notify();
2550 }
2551
2552 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2553 let mut deferred_ops = Vec::new();
2554 for op in self.deferred_ops.drain().iter().cloned() {
2555 if self.can_apply_op(&op) {
2556 self.apply_op(op, cx);
2557 } else {
2558 deferred_ops.push(op);
2559 }
2560 }
2561 self.deferred_ops.insert(deferred_ops);
2562 }
2563
2564 pub fn has_deferred_ops(&self) -> bool {
2565 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2566 }
2567
2568 fn can_apply_op(&self, operation: &Operation) -> bool {
2569 match operation {
2570 Operation::Buffer(_) => {
2571 unreachable!("buffer operations should never be applied at this layer")
2572 }
2573 Operation::UpdateDiagnostics {
2574 diagnostics: diagnostic_set,
2575 ..
2576 } => diagnostic_set.iter().all(|diagnostic| {
2577 self.text.can_resolve(&diagnostic.range.start)
2578 && self.text.can_resolve(&diagnostic.range.end)
2579 }),
2580 Operation::UpdateSelections { selections, .. } => selections
2581 .iter()
2582 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2583 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2584 }
2585 }
2586
2587 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2588 match operation {
2589 Operation::Buffer(_) => {
2590 unreachable!("buffer operations should never be applied at this layer")
2591 }
2592 Operation::UpdateDiagnostics {
2593 server_id,
2594 diagnostics: diagnostic_set,
2595 lamport_timestamp,
2596 } => {
2597 let snapshot = self.snapshot();
2598 self.apply_diagnostic_update(
2599 server_id,
2600 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2601 lamport_timestamp,
2602 cx,
2603 );
2604 }
2605 Operation::UpdateSelections {
2606 selections,
2607 lamport_timestamp,
2608 line_mode,
2609 cursor_shape,
2610 } => {
2611 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2612 && set.lamport_timestamp > lamport_timestamp
2613 {
2614 return;
2615 }
2616
2617 self.remote_selections.insert(
2618 lamport_timestamp.replica_id,
2619 SelectionSet {
2620 selections,
2621 lamport_timestamp,
2622 line_mode,
2623 cursor_shape,
2624 },
2625 );
2626 self.text.lamport_clock.observe(lamport_timestamp);
2627 self.non_text_state_update_count += 1;
2628 }
2629 Operation::UpdateCompletionTriggers {
2630 triggers,
2631 lamport_timestamp,
2632 server_id,
2633 } => {
2634 if triggers.is_empty() {
2635 self.completion_triggers_per_language_server
2636 .remove(&server_id);
2637 self.completion_triggers = self
2638 .completion_triggers_per_language_server
2639 .values()
2640 .flat_map(|triggers| triggers.iter().cloned())
2641 .collect();
2642 } else {
2643 self.completion_triggers_per_language_server
2644 .insert(server_id, triggers.iter().cloned().collect());
2645 self.completion_triggers.extend(triggers);
2646 }
2647 self.text.lamport_clock.observe(lamport_timestamp);
2648 }
2649 Operation::UpdateLineEnding {
2650 line_ending,
2651 lamport_timestamp,
2652 } => {
2653 self.text.set_line_ending(line_ending);
2654 self.text.lamport_clock.observe(lamport_timestamp);
2655 }
2656 }
2657 }
2658
2659 fn apply_diagnostic_update(
2660 &mut self,
2661 server_id: LanguageServerId,
2662 diagnostics: DiagnosticSet,
2663 lamport_timestamp: clock::Lamport,
2664 cx: &mut Context<Self>,
2665 ) {
2666 if lamport_timestamp > self.diagnostics_timestamp {
2667 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2668 if diagnostics.is_empty() {
2669 if let Ok(ix) = ix {
2670 self.diagnostics.remove(ix);
2671 }
2672 } else {
2673 match ix {
2674 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2675 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2676 };
2677 }
2678 self.diagnostics_timestamp = lamport_timestamp;
2679 self.non_text_state_update_count += 1;
2680 self.text.lamport_clock.observe(lamport_timestamp);
2681 cx.notify();
2682 cx.emit(BufferEvent::DiagnosticsUpdated);
2683 }
2684 }
2685
2686 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2687 self.was_changed();
2688 cx.emit(BufferEvent::Operation {
2689 operation,
2690 is_local,
2691 });
2692 }
2693
2694 /// Removes the selections for a given peer.
2695 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2696 self.remote_selections.remove(&replica_id);
2697 cx.notify();
2698 }
2699
2700 /// Undoes the most recent transaction.
2701 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2702 let was_dirty = self.is_dirty();
2703 let old_version = self.version.clone();
2704
2705 if let Some((transaction_id, operation)) = self.text.undo() {
2706 self.send_operation(Operation::Buffer(operation), true, cx);
2707 self.did_edit(&old_version, was_dirty, cx);
2708 Some(transaction_id)
2709 } else {
2710 None
2711 }
2712 }
2713
2714 /// Manually undoes a specific transaction in the buffer's undo history.
2715 pub fn undo_transaction(
2716 &mut self,
2717 transaction_id: TransactionId,
2718 cx: &mut Context<Self>,
2719 ) -> bool {
2720 let was_dirty = self.is_dirty();
2721 let old_version = self.version.clone();
2722 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2723 self.send_operation(Operation::Buffer(operation), true, cx);
2724 self.did_edit(&old_version, was_dirty, cx);
2725 true
2726 } else {
2727 false
2728 }
2729 }
2730
2731 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2732 pub fn undo_to_transaction(
2733 &mut self,
2734 transaction_id: TransactionId,
2735 cx: &mut Context<Self>,
2736 ) -> bool {
2737 let was_dirty = self.is_dirty();
2738 let old_version = self.version.clone();
2739
2740 let operations = self.text.undo_to_transaction(transaction_id);
2741 let undone = !operations.is_empty();
2742 for operation in operations {
2743 self.send_operation(Operation::Buffer(operation), true, cx);
2744 }
2745 if undone {
2746 self.did_edit(&old_version, was_dirty, cx)
2747 }
2748 undone
2749 }
2750
2751 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2752 let was_dirty = self.is_dirty();
2753 let operation = self.text.undo_operations(counts);
2754 let old_version = self.version.clone();
2755 self.send_operation(Operation::Buffer(operation), true, cx);
2756 self.did_edit(&old_version, was_dirty, cx);
2757 }
2758
2759    /// Redoes the most recent transaction.
2760 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2761 let was_dirty = self.is_dirty();
2762 let old_version = self.version.clone();
2763
2764 if let Some((transaction_id, operation)) = self.text.redo() {
2765 self.send_operation(Operation::Buffer(operation), true, cx);
2766 self.did_edit(&old_version, was_dirty, cx);
2767 Some(transaction_id)
2768 } else {
2769 None
2770 }
2771 }
2772
2773    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2774 pub fn redo_to_transaction(
2775 &mut self,
2776 transaction_id: TransactionId,
2777 cx: &mut Context<Self>,
2778 ) -> bool {
2779 let was_dirty = self.is_dirty();
2780 let old_version = self.version.clone();
2781
2782 let operations = self.text.redo_to_transaction(transaction_id);
2783 let redone = !operations.is_empty();
2784 for operation in operations {
2785 self.send_operation(Operation::Buffer(operation), true, cx);
2786 }
2787 if redone {
2788 self.did_edit(&old_version, was_dirty, cx)
2789 }
2790 redone
2791 }
2792
2793    /// Overrides the current completion triggers with the user-provided completion triggers.
2794 pub fn set_completion_triggers(
2795 &mut self,
2796 server_id: LanguageServerId,
2797 triggers: BTreeSet<String>,
2798 cx: &mut Context<Self>,
2799 ) {
2800 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2801 if triggers.is_empty() {
2802 self.completion_triggers_per_language_server
2803 .remove(&server_id);
2804 self.completion_triggers = self
2805 .completion_triggers_per_language_server
2806 .values()
2807 .flat_map(|triggers| triggers.iter().cloned())
2808 .collect();
2809 } else {
2810 self.completion_triggers_per_language_server
2811 .insert(server_id, triggers.clone());
2812 self.completion_triggers.extend(triggers.iter().cloned());
2813 }
2814 self.send_operation(
2815 Operation::UpdateCompletionTriggers {
2816 triggers: triggers.into_iter().collect(),
2817 lamport_timestamp: self.completion_triggers_timestamp,
2818 server_id,
2819 },
2820 true,
2821 cx,
2822 );
2823 cx.notify();
2824 }
2825
2826 /// Returns a list of strings which trigger a completion menu for this language.
2827    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2828 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2829 &self.completion_triggers
2830 }
2831
2832 /// Call this directly after performing edits to prevent the preview tab
2833 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2834 /// to return false until there are additional edits.
2835 pub fn refresh_preview(&mut self) {
2836 self.preview_version = self.version.clone();
2837 }
2838
2839 /// Whether we should preserve the preview status of a tab containing this buffer.
2840 pub fn preserve_preview(&self) -> bool {
2841 !self.has_edits_since(&self.preview_version)
2842 }
2843}
2844
2845#[doc(hidden)]
2846#[cfg(any(test, feature = "test-support"))]
2847impl Buffer {
2848 pub fn edit_via_marked_text(
2849 &mut self,
2850 marked_string: &str,
2851 autoindent_mode: Option<AutoindentMode>,
2852 cx: &mut Context<Self>,
2853 ) {
2854 let edits = self.edits_for_marked_text(marked_string);
2855 self.edit(edits, autoindent_mode, cx);
2856 }
2857
2858 pub fn set_group_interval(&mut self, group_interval: Duration) {
2859 self.text.set_group_interval(group_interval);
2860 }
2861
2862 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2863 where
2864 T: rand::Rng,
2865 {
2866 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2867 let mut last_end = None;
2868 for _ in 0..old_range_count {
2869 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2870 break;
2871 }
2872
2873 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2874 let mut range = self.random_byte_range(new_start, rng);
2875 if rng.random_bool(0.2) {
2876 mem::swap(&mut range.start, &mut range.end);
2877 }
2878 last_end = Some(range.end);
2879
2880 let new_text_len = rng.random_range(0..10);
2881 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2882 new_text = new_text.to_uppercase();
2883
2884 edits.push((range, new_text));
2885 }
2886 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2887 self.edit(edits, None, cx);
2888 }
2889
2890 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2891 let was_dirty = self.is_dirty();
2892 let old_version = self.version.clone();
2893
2894 let ops = self.text.randomly_undo_redo(rng);
2895 if !ops.is_empty() {
2896 for op in ops {
2897 self.send_operation(Operation::Buffer(op), true, cx);
2898 self.did_edit(&old_version, was_dirty, cx);
2899 }
2900 }
2901 }
2902}
2903
2904impl EventEmitter<BufferEvent> for Buffer {}
2905
2906impl Deref for Buffer {
2907 type Target = TextBuffer;
2908
2909 fn deref(&self) -> &Self::Target {
2910 &self.text
2911 }
2912}
2913
2914impl BufferSnapshot {
2915 /// Returns [`IndentSize`] for a given line that respects user settings and
2916 /// language preferences.
2917 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2918 indent_size_for_line(self, row)
2919 }
2920
2921 /// Returns [`IndentSize`] for a given position that respects user settings
2922 /// and language preferences.
2923 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2924 let settings = language_settings(
2925 self.language_at(position).map(|l| l.name()),
2926 self.file(),
2927 cx,
2928 );
2929 if settings.hard_tabs {
2930 IndentSize::tab()
2931 } else {
2932 IndentSize::spaces(settings.tab_size.get())
2933 }
2934 }
2935
2936 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2937 /// is passed in as `single_indent_size`.
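    ///
    /// An illustrative call (not compiled as a doc test):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 2, 3, and 4, using four spaces as one
    /// // level of indentation.
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     // `indent` is the suggested `IndentSize` for `row`.
    /// }
    /// ```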
2938 pub fn suggested_indents(
2939 &self,
2940 rows: impl Iterator<Item = u32>,
2941 single_indent_size: IndentSize,
2942 ) -> BTreeMap<u32, IndentSize> {
2943 let mut result = BTreeMap::new();
2944
2945 for row_range in contiguous_ranges(rows, 10) {
2946 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2947 Some(suggestions) => suggestions,
2948 _ => break,
2949 };
2950
2951 for (row, suggestion) in row_range.zip(suggestions) {
2952 let indent_size = if let Some(suggestion) = suggestion {
2953 result
2954 .get(&suggestion.basis_row)
2955 .copied()
2956 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2957 .with_delta(suggestion.delta, single_indent_size)
2958 } else {
2959 self.indent_size_for_line(row)
2960 };
2961
2962 result.insert(row, indent_size);
2963 }
2964 }
2965
2966 result
2967 }
2968
2969 fn suggest_autoindents(
2970 &self,
2971 row_range: Range<u32>,
2972 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2973 let config = &self.language.as_ref()?.config;
2974 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2975
2976 #[derive(Debug, Clone)]
2977 struct StartPosition {
2978 start: Point,
2979 suffix: SharedString,
2980 }
2981
2982 // Find the suggested indentation ranges based on the syntax tree.
2983 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2984 let end = Point::new(row_range.end, 0);
2985 let range = (start..end).to_offset(&self.text);
2986 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2987 Some(&grammar.indents_config.as_ref()?.query)
2988 });
2989 let indent_configs = matches
2990 .grammars()
2991 .iter()
2992 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2993 .collect::<Vec<_>>();
2994
2995 let mut indent_ranges = Vec::<Range<Point>>::new();
2996 let mut start_positions = Vec::<StartPosition>::new();
2997 let mut outdent_positions = Vec::<Point>::new();
2998 while let Some(mat) = matches.peek() {
2999 let mut start: Option<Point> = None;
3000 let mut end: Option<Point> = None;
3001
3002 let config = indent_configs[mat.grammar_index];
3003 for capture in mat.captures {
3004 if capture.index == config.indent_capture_ix {
3005 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3006 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3007 } else if Some(capture.index) == config.start_capture_ix {
3008 start = Some(Point::from_ts_point(capture.node.end_position()));
3009 } else if Some(capture.index) == config.end_capture_ix {
3010 end = Some(Point::from_ts_point(capture.node.start_position()));
3011 } else if Some(capture.index) == config.outdent_capture_ix {
3012 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3013 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3014 start_positions.push(StartPosition {
3015 start: Point::from_ts_point(capture.node.start_position()),
3016 suffix: suffix.clone(),
3017 });
3018 }
3019 }
3020
3021 matches.advance();
3022 if let Some((start, end)) = start.zip(end) {
3023 if start.row == end.row {
3024 continue;
3025 }
3026 let range = start..end;
3027 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3028 Err(ix) => indent_ranges.insert(ix, range),
3029 Ok(ix) => {
3030 let prev_range = &mut indent_ranges[ix];
3031 prev_range.end = prev_range.end.max(range.end);
3032 }
3033 }
3034 }
3035 }
3036
3037 let mut error_ranges = Vec::<Range<Point>>::new();
3038 let mut matches = self
3039 .syntax
3040 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3041 while let Some(mat) = matches.peek() {
3042 let node = mat.captures[0].node;
3043 let start = Point::from_ts_point(node.start_position());
3044 let end = Point::from_ts_point(node.end_position());
3045 let range = start..end;
3046 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3047 Ok(ix) | Err(ix) => ix,
3048 };
3049 let mut end_ix = ix;
3050 while let Some(existing_range) = error_ranges.get(end_ix) {
3051 if existing_range.end < end {
3052 end_ix += 1;
3053 } else {
3054 break;
3055 }
3056 }
3057 error_ranges.splice(ix..end_ix, [range]);
3058 matches.advance();
3059 }
3060
3061 outdent_positions.sort();
3062 for outdent_position in outdent_positions {
3063            // Find the innermost indent range containing this outdent position
3064            // and set its end to the outdent position.
3065 if let Some(range_to_truncate) = indent_ranges
3066 .iter_mut()
3067 .filter(|indent_range| indent_range.contains(&outdent_position))
3068 .next_back()
3069 {
3070 range_to_truncate.end = outdent_position;
3071 }
3072 }
3073
3074 start_positions.sort_by_key(|b| b.start);
3075
3076        // Find the suggested indentation increases and decreases based on regexes.
3077 let mut regex_outdent_map = HashMap::default();
3078 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3079 let mut start_positions_iter = start_positions.iter().peekable();
3080
3081 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3082 self.for_each_line(
3083 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3084 ..Point::new(row_range.end, 0),
3085 |row, line| {
3086 if config
3087 .decrease_indent_pattern
3088 .as_ref()
3089 .is_some_and(|regex| regex.is_match(line))
3090 {
3091 indent_change_rows.push((row, Ordering::Less));
3092 }
3093 if config
3094 .increase_indent_pattern
3095 .as_ref()
3096 .is_some_and(|regex| regex.is_match(line))
3097 {
3098 indent_change_rows.push((row + 1, Ordering::Greater));
3099 }
3100 while let Some(pos) = start_positions_iter.peek() {
3101 if pos.start.row < row {
3102 let pos = start_positions_iter.next().unwrap();
3103 last_seen_suffix
3104 .entry(pos.suffix.to_string())
3105 .or_default()
3106 .push(pos.start);
3107 } else {
3108 break;
3109 }
3110 }
3111 for rule in &config.decrease_indent_patterns {
3112 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3113 let row_start_column = self.indent_size_for_line(row).len;
3114 let basis_row = rule
3115 .valid_after
3116 .iter()
3117 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3118 .flatten()
3119 .filter(|start_point| start_point.column <= row_start_column)
3120 .max_by_key(|start_point| start_point.row);
3121 if let Some(outdent_to_row) = basis_row {
3122 regex_outdent_map.insert(row, outdent_to_row.row);
3123 }
3124 break;
3125 }
3126 }
3127 },
3128 );
3129
3130 let mut indent_changes = indent_change_rows.into_iter().peekable();
3131 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3132 prev_non_blank_row.unwrap_or(0)
3133 } else {
3134 row_range.start.saturating_sub(1)
3135 };
3136
3137 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3138 Some(row_range.map(move |row| {
3139 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3140
3141 let mut indent_from_prev_row = false;
3142 let mut outdent_from_prev_row = false;
3143 let mut outdent_to_row = u32::MAX;
3144 let mut from_regex = false;
3145
3146 while let Some((indent_row, delta)) = indent_changes.peek() {
3147 match indent_row.cmp(&row) {
3148 Ordering::Equal => match delta {
3149 Ordering::Less => {
3150 from_regex = true;
3151 outdent_from_prev_row = true
3152 }
3153 Ordering::Greater => {
3154 indent_from_prev_row = true;
3155 from_regex = true
3156 }
3157 _ => {}
3158 },
3159
3160 Ordering::Greater => break,
3161 Ordering::Less => {}
3162 }
3163
3164 indent_changes.next();
3165 }
3166
3167 for range in &indent_ranges {
3168 if range.start.row >= row {
3169 break;
3170 }
3171 if range.start.row == prev_row && range.end > row_start {
3172 indent_from_prev_row = true;
3173 }
3174 if range.end > prev_row_start && range.end <= row_start {
3175 outdent_to_row = outdent_to_row.min(range.start.row);
3176 }
3177 }
3178
3179 if let Some(basis_row) = regex_outdent_map.get(&row) {
3180 indent_from_prev_row = false;
3181 outdent_to_row = *basis_row;
3182 from_regex = true;
3183 }
3184
3185 let within_error = error_ranges
3186 .iter()
3187 .any(|e| e.start.row < row && e.end > row_start);
3188
3189 let suggestion = if outdent_to_row == prev_row
3190 || (outdent_from_prev_row && indent_from_prev_row)
3191 {
3192 Some(IndentSuggestion {
3193 basis_row: prev_row,
3194 delta: Ordering::Equal,
3195 within_error: within_error && !from_regex,
3196 })
3197 } else if indent_from_prev_row {
3198 Some(IndentSuggestion {
3199 basis_row: prev_row,
3200 delta: Ordering::Greater,
3201 within_error: within_error && !from_regex,
3202 })
3203 } else if outdent_to_row < prev_row {
3204 Some(IndentSuggestion {
3205 basis_row: outdent_to_row,
3206 delta: Ordering::Equal,
3207 within_error: within_error && !from_regex,
3208 })
3209 } else if outdent_from_prev_row {
3210 Some(IndentSuggestion {
3211 basis_row: prev_row,
3212 delta: Ordering::Less,
3213 within_error: within_error && !from_regex,
3214 })
3215 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3216 {
3217 Some(IndentSuggestion {
3218 basis_row: prev_row,
3219 delta: Ordering::Equal,
3220 within_error: within_error && !from_regex,
3221 })
3222 } else {
3223 None
3224 };
3225
3226 prev_row = row;
3227 prev_row_start = row_start;
3228 suggestion
3229 }))
3230 }
3231
3232 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3233 while row > 0 {
3234 row -= 1;
3235 if !self.is_line_blank(row) {
3236 return Some(row);
3237 }
3238 }
3239 None
3240 }
3241
3242 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3243 let captures = self.syntax.captures(range, &self.text, |grammar| {
3244 grammar.highlights_query.as_ref()
3245 });
3246 let highlight_maps = captures
3247 .grammars()
3248 .iter()
3249 .map(|grammar| grammar.highlight_map())
3250 .collect();
3251 (captures, highlight_maps)
3252 }
3253
3254 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3255 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3256 /// returned in chunks where each chunk has a single syntax highlighting style and
3257 /// diagnostic status.
3258 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3259 let range = range.start.to_offset(self)..range.end.to_offset(self);
3260
3261 let mut syntax = None;
3262 if language_aware {
3263 syntax = Some(self.get_highlights(range.clone()));
3264 }
3265 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3266 let diagnostics = language_aware;
3267 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3268 }
3269
3270 pub fn highlighted_text_for_range<T: ToOffset>(
3271 &self,
3272 range: Range<T>,
3273 override_style: Option<HighlightStyle>,
3274 syntax_theme: &SyntaxTheme,
3275 ) -> HighlightedText {
3276 HighlightedText::from_buffer_range(
3277 range,
3278 &self.text,
3279 &self.syntax,
3280 override_style,
3281 syntax_theme,
3282 )
3283 }
3284
3285 /// Invokes the given callback for each line of text in the given range of the buffer.
3286    /// Uses a callback to avoid allocating a string for each line.
3287 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3288 let mut line = String::new();
3289 let mut row = range.start.row;
3290 for chunk in self
3291 .as_rope()
3292 .chunks_in_range(range.to_offset(self))
3293 .chain(["\n"])
3294 {
3295 for (newline_ix, text) in chunk.split('\n').enumerate() {
3296 if newline_ix > 0 {
3297 callback(row, &line);
3298 row += 1;
3299 line.clear();
3300 }
3301 line.push_str(text);
3302 }
3303 }
3304 }
3305
3306 /// Iterates over every [`SyntaxLayer`] in the buffer.
3307 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3308 self.syntax
3309 .layers_for_range(0..self.len(), &self.text, true)
3310 }
3311
3312 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3313 let offset = position.to_offset(self);
3314 self.syntax
3315 .layers_for_range(offset..offset, &self.text, false)
3316 .filter(|l| l.node().end_byte() > offset)
3317 .last()
3318 }
3319
3320 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3321 &self,
3322 range: Range<D>,
3323 ) -> Option<SyntaxLayer<'_>> {
3324 let range = range.to_offset(self);
3325 self.syntax
3326 .layers_for_range(range, &self.text, false)
3327 .max_by(|a, b| {
3328 if a.depth != b.depth {
3329 a.depth.cmp(&b.depth)
3330 } else if a.offset.0 != b.offset.0 {
3331 a.offset.0.cmp(&b.offset.0)
3332 } else {
3333 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3334 }
3335 })
3336 }
3337
3338 /// Returns the main [`Language`].
3339 pub fn language(&self) -> Option<&Arc<Language>> {
3340 self.language.as_ref()
3341 }
3342
3343 /// Returns the [`Language`] at the given location.
3344 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3345 self.syntax_layer_at(position)
3346 .map(|info| info.language)
3347 .or(self.language.as_ref())
3348 }
3349
3350 /// Returns the settings for the language at the given location.
3351 pub fn settings_at<'a, D: ToOffset>(
3352 &'a self,
3353 position: D,
3354 cx: &'a App,
3355 ) -> Cow<'a, LanguageSettings> {
3356 language_settings(
3357 self.language_at(position).map(|l| l.name()),
3358 self.file.as_ref(),
3359 cx,
3360 )
3361 }
3362
3363 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3364 CharClassifier::new(self.language_scope_at(point))
3365 }
3366
3367 /// Returns the [`LanguageScope`] at the given location.
3368 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3369 let offset = position.to_offset(self);
3370 let mut scope = None;
3371 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3372
3373 // Use the layer that has the smallest node intersecting the given point.
3374 for layer in self
3375 .syntax
3376 .layers_for_range(offset..offset, &self.text, false)
3377 {
3378 let mut cursor = layer.node().walk();
3379
3380 let mut range = None;
3381 loop {
3382 let child_range = cursor.node().byte_range();
3383 if !child_range.contains(&offset) {
3384 break;
3385 }
3386
3387 range = Some(child_range);
3388 if cursor.goto_first_child_for_byte(offset).is_none() {
3389 break;
3390 }
3391 }
3392
3393 if let Some(range) = range
3394 && smallest_range_and_depth.as_ref().is_none_or(
3395 |(smallest_range, smallest_range_depth)| {
3396 if layer.depth > *smallest_range_depth {
3397 true
3398 } else if layer.depth == *smallest_range_depth {
3399 range.len() < smallest_range.len()
3400 } else {
3401 false
3402 }
3403 },
3404 )
3405 {
3406 smallest_range_and_depth = Some((range, layer.depth));
3407 scope = Some(LanguageScope {
3408 language: layer.language.clone(),
3409 override_id: layer.override_id(offset, &self.text),
3410 });
3411 }
3412 }
3413
3414 scope.or_else(|| {
3415 self.language.clone().map(|language| LanguageScope {
3416 language,
3417 override_id: None,
3418 })
3419 })
3420 }
3421
3422 /// Returns a tuple of the range and character kind of the word
3423 /// surrounding the given position.
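    ///
    /// For example (illustrative only): with the text `let foo = 1;` and an
    /// offset inside `foo`, this returns the byte range covering `foo` (4..7)
    /// together with the word character kind.
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(5, false);
    /// assert_eq!(range, 4..7);
    /// ```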
3424 pub fn surrounding_word<T: ToOffset>(
3425 &self,
3426 start: T,
3427 for_completion: bool,
3428 ) -> (Range<usize>, Option<CharKind>) {
3429 let mut start = start.to_offset(self);
3430 let mut end = start;
3431 let mut next_chars = self.chars_at(start).take(128).peekable();
3432 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3433
3434 let classifier = self
3435 .char_classifier_at(start)
3436 .for_completion(for_completion);
3437 let word_kind = cmp::max(
3438 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3439 next_chars.peek().copied().map(|c| classifier.kind(c)),
3440 );
3441
3442 for ch in prev_chars {
3443 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3444 start -= ch.len_utf8();
3445 } else {
3446 break;
3447 }
3448 }
3449
3450 for ch in next_chars {
3451 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3452 end += ch.len_utf8();
3453 } else {
3454 break;
3455 }
3456 }
3457
3458 (start..end, word_kind)
3459 }
3460
3462    /// Positions a tree cursor at the leaf node that contains or touches the given range.
3463    /// This is shared logic used by syntax navigation methods.
3464 fn position_cursor_at_range(cursor: &mut tree_sitter::TreeCursor, range: &Range<usize>) {
3465 // Descend to the first leaf that touches the start of the range.
3466 //
3467 // If the range is non-empty and the current node ends exactly at the start,
3468 // move to the next sibling to find a node that extends beyond the start.
3469 //
3470 // If the range is empty and the current node starts after the range position,
3471 // move to the previous sibling to find the node that contains the position.
3472 while cursor.goto_first_child_for_byte(range.start).is_some() {
3473 if !range.is_empty() && cursor.node().end_byte() == range.start {
3474 cursor.goto_next_sibling();
3475 }
3476 if range.is_empty() && cursor.node().start_byte() > range.start {
3477 cursor.goto_previous_sibling();
3478 }
3479 }
3480 }
3481
3482 /// Moves the cursor to find a node that contains the given range.
3483 /// Returns true if such a node is found, false otherwise.
3484 /// This is shared logic used by syntax navigation methods.
3485 fn find_containing_node(
3486 cursor: &mut tree_sitter::TreeCursor,
3487 range: &Range<usize>,
3488 strict: bool,
3489 ) -> bool {
3490 loop {
3491 let node_range = cursor.node().byte_range();
3492
3493 if node_range.start <= range.start
3494 && node_range.end >= range.end
3495 && (!strict || node_range.len() > range.len())
3496 {
3497 return true;
3498 }
3499 if !cursor.goto_parent() {
3500 return false;
3501 }
3502 }
3503 }
3504
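    /// Returns the closest syntax node enclosing the given range.
    ///
    /// When multiple syntax layers cover the range, the smallest enclosing node
    /// across all layers is preferred.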
3505 pub fn syntax_ancestor<'a, T: ToOffset>(
3506 &'a self,
3507 range: Range<T>,
3508 ) -> Option<tree_sitter::Node<'a>> {
3509 let range = range.start.to_offset(self)..range.end.to_offset(self);
3510 let mut result: Option<tree_sitter::Node<'a>> = None;
3511 for layer in self
3512 .syntax
3513 .layers_for_range(range.clone(), &self.text, true)
3514 {
3515 let mut cursor = layer.node().walk();
3516
3517 Self::position_cursor_at_range(&mut cursor, &range);
3518
3519 // Ascend to the smallest ancestor that strictly contains the range.
3520 if !Self::find_containing_node(&mut cursor, &range, true) {
3521 continue;
3522 }
3523
3524 let left_node = cursor.node();
3525 let mut layer_result = left_node;
3526
3527 // For an empty range, try to find another node immediately to the right of the range.
3528 if left_node.end_byte() == range.start {
3529 let mut right_node = None;
3530 while !cursor.goto_next_sibling() {
3531 if !cursor.goto_parent() {
3532 break;
3533 }
3534 }
3535
3536 while cursor.node().start_byte() == range.start {
3537 right_node = Some(cursor.node());
3538 if !cursor.goto_first_child() {
3539 break;
3540 }
3541 }
3542
3543 // If there is a candidate node on both sides of the (empty) range, then
3544 // decide between the two by favoring a named node over an anonymous token.
3545 // If both nodes are the same in that regard, favor the right one.
3546 if let Some(right_node) = right_node
3547 && (right_node.is_named() || !left_node.is_named())
3548 {
3549 layer_result = right_node;
3550 }
3551 }
3552
3553 if let Some(previous_result) = &result
3554 && previous_result.byte_range().len() < layer_result.byte_range().len()
3555 {
3556 continue;
3557 }
3558 result = Some(layer_result);
3559 }
3560
3561 result
3562 }
3563
3564 /// Find the previous sibling syntax node at the given range.
3565 ///
3566 /// This function locates the syntax node that precedes the node containing
3567 /// the given range. It searches hierarchically by:
3568 /// 1. Finding the node that contains the given range
3569 /// 2. Looking for the previous sibling at the same tree level
3570 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3571 ///
3572 /// Returns `None` if there is no previous sibling at any ancestor level.
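    ///
    /// A minimal sketch (not a doctest), assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(10..14) {
    ///     println!("previous sibling: {}", node.kind());
    /// }
    /// ```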
3573 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3574 &'a self,
3575 range: Range<T>,
3576 ) -> Option<tree_sitter::Node<'a>> {
3577 let range = range.start.to_offset(self)..range.end.to_offset(self);
3578 let mut result: Option<tree_sitter::Node<'a>> = None;
3579
3580 for layer in self
3581 .syntax
3582 .layers_for_range(range.clone(), &self.text, true)
3583 {
3584 let mut cursor = layer.node().walk();
3585
3586 Self::position_cursor_at_range(&mut cursor, &range);
3587
3588 // Find the node that contains the range
3589 if !Self::find_containing_node(&mut cursor, &range, false) {
3590 continue;
3591 }
3592
3593 // Look for the previous sibling, moving up ancestor levels if needed
3594 loop {
3595 if cursor.goto_previous_sibling() {
3596 let layer_result = cursor.node();
3597
3598 if let Some(previous_result) = &result {
3599 if previous_result.byte_range().end < layer_result.byte_range().end {
3600 continue;
3601 }
3602 }
3603 result = Some(layer_result);
3604 break;
3605 }
3606
3607 // No sibling found at this level, try moving up to parent
3608 if !cursor.goto_parent() {
3609 break;
3610 }
3611 }
3612 }
3613
3614 result
3615 }
3616
3617 /// Find the next sibling syntax node at the given range.
3618 ///
3619 /// This function locates the syntax node that follows the node containing
3620 /// the given range. It searches hierarchically by:
3621 /// 1. Finding the node that contains the given range
3622 /// 2. Looking for the next sibling at the same tree level
3623 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3624 ///
3625 /// Returns `None` if there is no next sibling at any ancestor level.
3626 pub fn syntax_next_sibling<'a, T: ToOffset>(
3627 &'a self,
3628 range: Range<T>,
3629 ) -> Option<tree_sitter::Node<'a>> {
3630 let range = range.start.to_offset(self)..range.end.to_offset(self);
3631 let mut result: Option<tree_sitter::Node<'a>> = None;
3632
3633 for layer in self
3634 .syntax
3635 .layers_for_range(range.clone(), &self.text, true)
3636 {
3637 let mut cursor = layer.node().walk();
3638
3639 Self::position_cursor_at_range(&mut cursor, &range);
3640
3641 // Find the node that contains the range
3642 if !Self::find_containing_node(&mut cursor, &range, false) {
3643 continue;
3644 }
3645
3646 // Look for the next sibling, moving up ancestor levels if needed
3647 loop {
3648 if cursor.goto_next_sibling() {
3649 let layer_result = cursor.node();
3650
3651 if let Some(previous_result) = &result {
3652 if previous_result.byte_range().start > layer_result.byte_range().start {
3653 continue;
3654 }
3655 }
3656 result = Some(layer_result);
3657 break;
3658 }
3659
3660 // No sibling found at this level, try moving up to parent
3661 if !cursor.goto_parent() {
3662 break;
3663 }
3664 }
3665 }
3666
3667 result
3668 }
3669
3670 /// Returns the root syntax node within the given row
3671 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3672 let start_offset = position.to_offset(self);
3673
3674 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3675
3676 let layer = self
3677 .syntax
3678 .layers_for_range(start_offset..start_offset, &self.text, true)
3679 .next()?;
3680
3681 let mut cursor = layer.node().walk();
3682
3683 // Descend to the first leaf that touches the start of the range.
3684 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3685 if cursor.node().end_byte() == start_offset {
3686 cursor.goto_next_sibling();
3687 }
3688 }
3689
3690 // Ascend to the root node within the same row.
3691 while cursor.goto_parent() {
3692 if cursor.node().start_position().row != row {
3693 break;
3694 }
3695 }
3696
3697 Some(cursor.node())
3698 }
3699
3700 /// Returns the outline for the buffer.
3701 ///
3702 /// This method allows passing an optional [`SyntaxTheme`] to
3703 /// syntax-highlight the returned symbols.
3704 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3705 self.outline_items_containing(0..self.len(), true, theme)
3706 .map(Outline::new)
3707 }
3708
3709 /// Returns all the symbols that contain the given position.
3710 ///
3711 /// This method allows passing an optional [`SyntaxTheme`] to
3712 /// syntax-highlight the returned symbols.
3713 pub fn symbols_containing<T: ToOffset>(
3714 &self,
3715 position: T,
3716 theme: Option<&SyntaxTheme>,
3717 ) -> Option<Vec<OutlineItem<Anchor>>> {
3718 let position = position.to_offset(self);
3719 let mut items = self.outline_items_containing(
3720 position.saturating_sub(1)..self.len().min(position + 1),
3721 false,
3722 theme,
3723 )?;
3724 let mut prev_depth = None;
3725 items.retain(|item| {
3726 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3727 prev_depth = Some(item.depth);
3728 result
3729 });
3730 Some(items)
3731 }
3732
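    /// Returns the point range of the first outline item that intersects the given
    /// range, if any.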
3733 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3734 let range = range.to_offset(self);
3735 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3736 grammar.outline_config.as_ref().map(|c| &c.query)
3737 });
3738 let configs = matches
3739 .grammars()
3740 .iter()
3741 .map(|g| g.outline_config.as_ref().unwrap())
3742 .collect::<Vec<_>>();
3743
3744 while let Some(mat) = matches.peek() {
3745 let config = &configs[mat.grammar_index];
3746 let containing_item_node = maybe!({
3747 let item_node = mat.captures.iter().find_map(|cap| {
3748 if cap.index == config.item_capture_ix {
3749 Some(cap.node)
3750 } else {
3751 None
3752 }
3753 })?;
3754
3755 let item_byte_range = item_node.byte_range();
3756 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3757 None
3758 } else {
3759 Some(item_node)
3760 }
3761 });
3762
3763 if let Some(item_node) = containing_item_node {
3764 return Some(
3765 Point::from_ts_point(item_node.start_position())
3766 ..Point::from_ts_point(item_node.end_position()),
3767 );
3768 }
3769
3770 matches.advance();
3771 }
3772 None
3773 }
3774
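    /// Returns the outline items captured by each language's outline query whose ranges
    /// intersect the given range, with depths assigned from their containment relationships.
    ///
    /// A minimal sketch (not a doctest), assuming `snapshot` is a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// if let Some(items) = snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     for item in items {
    ///         // Indent each symbol according to its nesting depth.
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```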
3775 pub fn outline_items_containing<T: ToOffset>(
3776 &self,
3777 range: Range<T>,
3778 include_extra_context: bool,
3779 theme: Option<&SyntaxTheme>,
3780 ) -> Option<Vec<OutlineItem<Anchor>>> {
3781 let range = range.to_offset(self);
3782 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3783 grammar.outline_config.as_ref().map(|c| &c.query)
3784 });
3785 let configs = matches
3786 .grammars()
3787 .iter()
3788 .map(|g| g.outline_config.as_ref().unwrap())
3789 .collect::<Vec<_>>();
3790
3791 let mut items = Vec::new();
3792 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3793 while let Some(mat) = matches.peek() {
3794 let config = &configs[mat.grammar_index];
3795 if let Some(item) =
3796 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3797 {
3798 items.push(item);
3799 } else if let Some(capture) = mat
3800 .captures
3801 .iter()
3802 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3803 {
3804 let capture_range = capture.node.start_position()..capture.node.end_position();
3805 let mut capture_row_range =
3806 capture_range.start.row as u32..capture_range.end.row as u32;
3807 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3808 {
3809 capture_row_range.end -= 1;
3810 }
3811 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3812 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3813 last_row_range.end = capture_row_range.end;
3814 } else {
3815 annotation_row_ranges.push(capture_row_range);
3816 }
3817 } else {
3818 annotation_row_ranges.push(capture_row_range);
3819 }
3820 }
3821 matches.advance();
3822 }
3823
3824 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3825
3826 // Assign depths based on containment relationships and convert to anchors.
3827 let mut item_ends_stack = Vec::<Point>::new();
3828 let mut anchor_items = Vec::new();
3829 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3830 for item in items {
3831 while let Some(last_end) = item_ends_stack.last().copied() {
3832 if last_end < item.range.end {
3833 item_ends_stack.pop();
3834 } else {
3835 break;
3836 }
3837 }
3838
3839 let mut annotation_row_range = None;
3840 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3841 let row_preceding_item = item.range.start.row.saturating_sub(1);
3842 if next_annotation_row_range.end < row_preceding_item {
3843 annotation_row_ranges.next();
3844 } else {
3845 if next_annotation_row_range.end == row_preceding_item {
3846 annotation_row_range = Some(next_annotation_row_range.clone());
3847 annotation_row_ranges.next();
3848 }
3849 break;
3850 }
3851 }
3852
3853 anchor_items.push(OutlineItem {
3854 depth: item_ends_stack.len(),
3855 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3856 text: item.text,
3857 highlight_ranges: item.highlight_ranges,
3858 name_ranges: item.name_ranges,
3859 body_range: item.body_range.map(|body_range| {
3860 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3861 }),
3862 annotation_range: annotation_row_range.map(|annotation_range| {
3863 self.anchor_after(Point::new(annotation_range.start, 0))
3864 ..self.anchor_before(Point::new(
3865 annotation_range.end,
3866 self.line_len(annotation_range.end),
3867 ))
3868 }),
3869 });
3870 item_ends_stack.push(item.range.end);
3871 }
3872
3873 Some(anchor_items)
3874 }
3875
3876 fn next_outline_item(
3877 &self,
3878 config: &OutlineConfig,
3879 mat: &SyntaxMapMatch,
3880 range: &Range<usize>,
3881 include_extra_context: bool,
3882 theme: Option<&SyntaxTheme>,
3883 ) -> Option<OutlineItem<Point>> {
3884 let item_node = mat.captures.iter().find_map(|cap| {
3885 if cap.index == config.item_capture_ix {
3886 Some(cap.node)
3887 } else {
3888 None
3889 }
3890 })?;
3891
3892 let item_byte_range = item_node.byte_range();
3893 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3894 return None;
3895 }
3896 let item_point_range = Point::from_ts_point(item_node.start_position())
3897 ..Point::from_ts_point(item_node.end_position());
3898
3899 let mut open_point = None;
3900 let mut close_point = None;
3901 let mut buffer_ranges = Vec::new();
3902 for capture in mat.captures {
3903 let node_is_name;
3904 if capture.index == config.name_capture_ix {
3905 node_is_name = true;
3906 } else if Some(capture.index) == config.context_capture_ix
3907 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3908 {
3909 node_is_name = false;
3910 } else {
3911 if Some(capture.index) == config.open_capture_ix {
3912 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3913 } else if Some(capture.index) == config.close_capture_ix {
3914 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3915 }
3916
3917 continue;
3918 }
3919
3920 let mut range = capture.node.start_byte()..capture.node.end_byte();
3921 let start = capture.node.start_position();
3922 if capture.node.end_position().row > start.row {
3923 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3924 }
3925
3926 if !range.is_empty() {
3927 buffer_ranges.push((range, node_is_name));
3928 }
3929 }
3930 if buffer_ranges.is_empty() {
3931 return None;
3932 }
3933 let mut text = String::new();
3934 let mut highlight_ranges = Vec::new();
3935 let mut name_ranges = Vec::new();
3936 let mut chunks = self.chunks(
3937 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3938 true,
3939 );
3940 let mut last_buffer_range_end = 0;
3941
3942 for (buffer_range, is_name) in buffer_ranges {
3943 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3944 if space_added {
3945 text.push(' ');
3946 }
3947 let before_append_len = text.len();
3948 let mut offset = buffer_range.start;
3949 chunks.seek(buffer_range.clone());
3950 for mut chunk in chunks.by_ref() {
3951 if chunk.text.len() > buffer_range.end - offset {
3952 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3953 offset = buffer_range.end;
3954 } else {
3955 offset += chunk.text.len();
3956 }
3957 let style = chunk
3958 .syntax_highlight_id
3959 .zip(theme)
3960 .and_then(|(highlight, theme)| highlight.style(theme));
3961 if let Some(style) = style {
3962 let start = text.len();
3963 let end = start + chunk.text.len();
3964 highlight_ranges.push((start..end, style));
3965 }
3966 text.push_str(chunk.text);
3967 if offset >= buffer_range.end {
3968 break;
3969 }
3970 }
3971 if is_name {
3972 let after_append_len = text.len();
3973 let start = if space_added && !name_ranges.is_empty() {
3974 before_append_len - 1
3975 } else {
3976 before_append_len
3977 };
3978 name_ranges.push(start..after_append_len);
3979 }
3980 last_buffer_range_end = buffer_range.end;
3981 }
3982
3983 Some(OutlineItem {
3984 depth: 0, // We'll calculate the depth later
3985 range: item_point_range,
3986 text,
3987 highlight_ranges,
3988 name_ranges,
3989 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3990 annotation_range: None,
3991 })
3992 }
3993
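    /// Returns the ranges of function bodies (the `InsideFunction` text object)
    /// within the given range, e.g. for folding function bodies.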
3994 pub fn function_body_fold_ranges<T: ToOffset>(
3995 &self,
3996 within: Range<T>,
3997 ) -> impl Iterator<Item = Range<usize>> + '_ {
3998 self.text_object_ranges(within, TreeSitterOptions::default())
3999 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4000 }
4001
4002 /// For each grammar in the language, runs the provided
4003 /// [`tree_sitter::Query`] against the given range.
4004 pub fn matches(
4005 &self,
4006 range: Range<usize>,
4007 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4008 ) -> SyntaxMapMatches<'_> {
4009 self.syntax.matches(range, self, query)
4010 }
4011
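    /// Returns all bracket pairs from the language's brackets query whose overall
    /// range (from the open bracket to the close bracket) overlaps the given range.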
4012 pub fn all_bracket_ranges(
4013 &self,
4014 range: Range<usize>,
4015 ) -> impl Iterator<Item = BracketMatch> + '_ {
4016 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4017 grammar.brackets_config.as_ref().map(|c| &c.query)
4018 });
4019 let configs = matches
4020 .grammars()
4021 .iter()
4022 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4023 .collect::<Vec<_>>();
4024
4025 iter::from_fn(move || {
4026 while let Some(mat) = matches.peek() {
4027 let mut open = None;
4028 let mut close = None;
4029 let config = &configs[mat.grammar_index];
4030 let pattern = &config.patterns[mat.pattern_index];
4031 for capture in mat.captures {
4032 if capture.index == config.open_capture_ix {
4033 open = Some(capture.node.byte_range());
4034 } else if capture.index == config.close_capture_ix {
4035 close = Some(capture.node.byte_range());
4036 }
4037 }
4038
4039 matches.advance();
4040
4041 let Some((open_range, close_range)) = open.zip(close) else {
4042 continue;
4043 };
4044
4045 let bracket_range = open_range.start..=close_range.end;
4046 if !bracket_range.overlaps(&range) {
4047 continue;
4048 }
4049
4050 return Some(BracketMatch {
4051 open_range,
4052 close_range,
4053 newline_only: pattern.newline_only,
4054 });
4055 }
4056 None
4057 })
4058 }
4059
4060 /// Returns bracket range pairs overlapping or adjacent to `range`
4061 pub fn bracket_ranges<T: ToOffset>(
4062 &self,
4063 range: Range<T>,
4064 ) -> impl Iterator<Item = BracketMatch> + '_ {
4065 // Find bracket pairs that *inclusively* contain the given range.
4066 let range = range.start.to_offset(self).saturating_sub(1)
4067 ..self.len().min(range.end.to_offset(self) + 1);
4068 self.all_bracket_ranges(range)
4069 .filter(|pair| !pair.newline_only)
4070 }
4071
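    /// Returns the ranges and kinds of debugger text objects, captured by each
    /// language's debug-variables query, that overlap the given range.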
4072 pub fn debug_variables_query<T: ToOffset>(
4073 &self,
4074 range: Range<T>,
4075 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4076 let range = range.start.to_offset(self).saturating_sub(1)
4077 ..self.len().min(range.end.to_offset(self) + 1);
4078
4079 let mut matches = self.syntax.matches_with_options(
4080 range.clone(),
4081 &self.text,
4082 TreeSitterOptions::default(),
4083 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4084 );
4085
4086 let configs = matches
4087 .grammars()
4088 .iter()
4089 .map(|grammar| grammar.debug_variables_config.as_ref())
4090 .collect::<Vec<_>>();
4091
4092 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4093
4094 iter::from_fn(move || {
4095 loop {
4096 while let Some(capture) = captures.pop() {
4097 if capture.0.overlaps(&range) {
4098 return Some(capture);
4099 }
4100 }
4101
4102 let mat = matches.peek()?;
4103
4104 let Some(config) = configs[mat.grammar_index].as_ref() else {
4105 matches.advance();
4106 continue;
4107 };
4108
4109 for capture in mat.captures {
4110 let Some(ix) = config
4111 .objects_by_capture_ix
4112 .binary_search_by_key(&capture.index, |e| e.0)
4113 .ok()
4114 else {
4115 continue;
4116 };
4117 let text_object = config.objects_by_capture_ix[ix].1;
4118 let byte_range = capture.node.byte_range();
4119
4120 let mut found = false;
4121 for (range, existing) in captures.iter_mut() {
4122 if existing == &text_object {
4123 range.start = range.start.min(byte_range.start);
4124 range.end = range.end.max(byte_range.end);
4125 found = true;
4126 break;
4127 }
4128 }
4129
4130 if !found {
4131 captures.push((byte_range, text_object));
4132 }
4133 }
4134
4135 matches.advance();
4136 }
4137 })
4138 }
4139
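    /// Returns the ranges and kinds of text objects, captured by each language's
    /// text-objects query, that overlap the given range.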
4140 pub fn text_object_ranges<T: ToOffset>(
4141 &self,
4142 range: Range<T>,
4143 options: TreeSitterOptions,
4144 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4145 let range = range.start.to_offset(self).saturating_sub(1)
4146 ..self.len().min(range.end.to_offset(self) + 1);
4147
4148 let mut matches =
4149 self.syntax
4150 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4151 grammar.text_object_config.as_ref().map(|c| &c.query)
4152 });
4153
4154 let configs = matches
4155 .grammars()
4156 .iter()
4157 .map(|grammar| grammar.text_object_config.as_ref())
4158 .collect::<Vec<_>>();
4159
4160 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4161
4162 iter::from_fn(move || {
4163 loop {
4164 while let Some(capture) = captures.pop() {
4165 if capture.0.overlaps(&range) {
4166 return Some(capture);
4167 }
4168 }
4169
4170 let mat = matches.peek()?;
4171
4172 let Some(config) = configs[mat.grammar_index].as_ref() else {
4173 matches.advance();
4174 continue;
4175 };
4176
4177 for capture in mat.captures {
4178 let Some(ix) = config
4179 .text_objects_by_capture_ix
4180 .binary_search_by_key(&capture.index, |e| e.0)
4181 .ok()
4182 else {
4183 continue;
4184 };
4185 let text_object = config.text_objects_by_capture_ix[ix].1;
4186 let byte_range = capture.node.byte_range();
4187
4188 let mut found = false;
4189 for (range, existing) in captures.iter_mut() {
4190 if existing == &text_object {
4191 range.start = range.start.min(byte_range.start);
4192 range.end = range.end.max(byte_range.end);
4193 found = true;
4194 break;
4195 }
4196 }
4197
4198 if !found {
4199 captures.push((byte_range, text_object));
4200 }
4201 }
4202
4203 matches.advance();
4204 }
4205 })
4206 }
4207
4208 /// Returns enclosing bracket ranges containing the given range
4209 pub fn enclosing_bracket_ranges<T: ToOffset>(
4210 &self,
4211 range: Range<T>,
4212 ) -> impl Iterator<Item = BracketMatch> + '_ {
4213 let range = range.start.to_offset(self)..range.end.to_offset(self);
4214
4215 self.bracket_ranges(range.clone()).filter(move |pair| {
4216 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4217 })
4218 }
4219
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket open/close ranges
    /// are considered.
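    ///
    /// A minimal sketch (not a doctest), assuming `snapshot` contains `"foo(bar)"` and a
    /// language whose brackets query captures parentheses:
    ///
    /// ```ignore
    /// // The innermost pair around offset 5 is the parentheses at offsets 3 and 7.
    /// let (open, close) = snapshot.innermost_enclosing_bracket_ranges(5..5, None).unwrap();
    /// assert_eq!((open, close), (3..4, 7..8));
    /// ```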
4223 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4224 &self,
4225 range: Range<T>,
4226 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4227 ) -> Option<(Range<usize>, Range<usize>)> {
4228 let range = range.start.to_offset(self)..range.end.to_offset(self);
4229
4230 // Get the ranges of the innermost pair of brackets.
4231 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4232
4233 for pair in self.enclosing_bracket_ranges(range) {
4234 if let Some(range_filter) = range_filter
4235 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4236 {
4237 continue;
4238 }
4239
4240 let len = pair.close_range.end - pair.open_range.start;
4241
4242 if let Some((existing_open, existing_close)) = &result {
4243 let existing_len = existing_close.end - existing_open.start;
4244 if len > existing_len {
4245 continue;
4246 }
4247 }
4248
4249 result = Some((pair.open_range, pair.close_range));
4250 }
4251
4252 result
4253 }
4254
4255 /// Returns anchor ranges for any matches of the redaction query.
4256 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4257 /// will be run on the relevant section of the buffer.
4258 pub fn redacted_ranges<T: ToOffset>(
4259 &self,
4260 range: Range<T>,
4261 ) -> impl Iterator<Item = Range<usize>> + '_ {
4262 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4263 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4264 grammar
4265 .redactions_config
4266 .as_ref()
4267 .map(|config| &config.query)
4268 });
4269
4270 let configs = syntax_matches
4271 .grammars()
4272 .iter()
4273 .map(|grammar| grammar.redactions_config.as_ref())
4274 .collect::<Vec<_>>();
4275
4276 iter::from_fn(move || {
4277 let redacted_range = syntax_matches
4278 .peek()
4279 .and_then(|mat| {
4280 configs[mat.grammar_index].and_then(|config| {
4281 mat.captures
4282 .iter()
4283 .find(|capture| capture.index == config.redaction_capture_ix)
4284 })
4285 })
4286 .map(|mat| mat.node.byte_range());
4287 syntax_matches.advance();
4288 redacted_range
4289 })
4290 }
4291
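    /// Returns the ranges of language injections (such as embedded code blocks)
    /// intersecting the given range, along with the injected language.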
4292 pub fn injections_intersecting_range<T: ToOffset>(
4293 &self,
4294 range: Range<T>,
4295 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4296 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4297
4298 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4299 grammar
4300 .injection_config
4301 .as_ref()
4302 .map(|config| &config.query)
4303 });
4304
4305 let configs = syntax_matches
4306 .grammars()
4307 .iter()
4308 .map(|grammar| grammar.injection_config.as_ref())
4309 .collect::<Vec<_>>();
4310
4311 iter::from_fn(move || {
4312 let ranges = syntax_matches.peek().and_then(|mat| {
4313 let config = &configs[mat.grammar_index]?;
4314 let content_capture_range = mat.captures.iter().find_map(|capture| {
4315 if capture.index == config.content_capture_ix {
4316 Some(capture.node.byte_range())
4317 } else {
4318 None
4319 }
4320 })?;
4321 let language = self.language_at(content_capture_range.start)?;
4322 Some((content_capture_range, language))
4323 });
4324 syntax_matches.advance();
4325 ranges
4326 })
4327 }
4328
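    /// Returns the runnable ranges, captured by each language's runnables query,
    /// within the given offset range.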
4329 pub fn runnable_ranges(
4330 &self,
4331 offset_range: Range<usize>,
4332 ) -> impl Iterator<Item = RunnableRange> + '_ {
4333 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4334 grammar.runnable_config.as_ref().map(|config| &config.query)
4335 });
4336
4337 let test_configs = syntax_matches
4338 .grammars()
4339 .iter()
4340 .map(|grammar| grammar.runnable_config.as_ref())
4341 .collect::<Vec<_>>();
4342
4343 iter::from_fn(move || {
4344 loop {
4345 let mat = syntax_matches.peek()?;
4346
4347 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4348 let mut run_range = None;
4349 let full_range = mat.captures.iter().fold(
4350 Range {
4351 start: usize::MAX,
4352 end: 0,
4353 },
4354 |mut acc, next| {
4355 let byte_range = next.node.byte_range();
4356 if acc.start > byte_range.start {
4357 acc.start = byte_range.start;
4358 }
4359 if acc.end < byte_range.end {
4360 acc.end = byte_range.end;
4361 }
4362 acc
4363 },
4364 );
4365 if full_range.start > full_range.end {
4366 // We did not find a full spanning range of this match.
4367 return None;
4368 }
4369 let extra_captures: SmallVec<[_; 1]> =
4370 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4371 test_configs
4372 .extra_captures
4373 .get(capture.index as usize)
4374 .cloned()
4375 .and_then(|tag_name| match tag_name {
4376 RunnableCapture::Named(name) => {
4377 Some((capture.node.byte_range(), name))
4378 }
4379 RunnableCapture::Run => {
4380 let _ = run_range.insert(capture.node.byte_range());
4381 None
4382 }
4383 })
4384 }));
4385 let run_range = run_range?;
4386 let tags = test_configs
4387 .query
4388 .property_settings(mat.pattern_index)
4389 .iter()
4390 .filter_map(|property| {
4391 if *property.key == *"tag" {
4392 property
4393 .value
4394 .as_ref()
4395 .map(|value| RunnableTag(value.to_string().into()))
4396 } else {
4397 None
4398 }
4399 })
4400 .collect();
4401 let extra_captures = extra_captures
4402 .into_iter()
4403 .map(|(range, name)| {
4404 (
4405 name.to_string(),
4406 self.text_for_range(range).collect::<String>(),
4407 )
4408 })
4409 .collect();
4410 // All tags should have the same range.
4411 Some(RunnableRange {
4412 run_range,
4413 full_range,
4414 runnable: Runnable {
4415 tags,
4416 language: mat.language,
4417 buffer: self.remote_id(),
4418 },
4419 extra_captures,
4420 buffer_id: self.remote_id(),
4421 })
4422 });
4423
4424 syntax_matches.advance();
4425 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? above returns None. When a match
                    // doesn't contain a run marker we don't return None from this iterator; we
                    // just loop around to the next match.
4428 return test_range;
4429 }
4430 }
4431 })
4432 }
4433
    /// Returns the selections of each replica that intersect the given range.
    /// When `include_local` is `false`, the local replica's selections are excluded.
4435 #[allow(clippy::type_complexity)]
4436 pub fn selections_in_range(
4437 &self,
4438 range: Range<Anchor>,
4439 include_local: bool,
4440 ) -> impl Iterator<
4441 Item = (
4442 ReplicaId,
4443 bool,
4444 CursorShape,
4445 impl Iterator<Item = &Selection<Anchor>> + '_,
4446 ),
4447 > + '_ {
4448 self.remote_selections
4449 .iter()
4450 .filter(move |(replica_id, set)| {
4451 (include_local || **replica_id != self.text.replica_id())
4452 && !set.selections.is_empty()
4453 })
4454 .map(move |(replica_id, set)| {
4455 let start_ix = match set.selections.binary_search_by(|probe| {
4456 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4457 }) {
4458 Ok(ix) | Err(ix) => ix,
4459 };
4460 let end_ix = match set.selections.binary_search_by(|probe| {
4461 probe.start.cmp(&range.end, self).then(Ordering::Less)
4462 }) {
4463 Ok(ix) | Err(ix) => ix,
4464 };
4465
4466 (
4467 *replica_id,
4468 set.line_mode,
4469 set.cursor_shape,
4470 set.selections[start_ix..end_ix].iter(),
4471 )
4472 })
4473 }
4474
    /// Returns whether the buffer contains any diagnostics.
4476 pub fn has_diagnostics(&self) -> bool {
4477 !self.diagnostics.is_empty()
4478 }
4479
4480 /// Returns all the diagnostics intersecting the given range.
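    ///
    /// A minimal sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```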
4481 pub fn diagnostics_in_range<'a, T, O>(
4482 &'a self,
4483 search_range: Range<T>,
4484 reversed: bool,
4485 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4486 where
4487 T: 'a + Clone + ToOffset,
4488 O: 'a + FromAnchor,
4489 {
4490 let mut iterators: Vec<_> = self
4491 .diagnostics
4492 .iter()
4493 .map(|(_, collection)| {
4494 collection
4495 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4496 .peekable()
4497 })
4498 .collect();
4499
4500 std::iter::from_fn(move || {
4501 let (next_ix, _) = iterators
4502 .iter_mut()
4503 .enumerate()
4504 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4505 .min_by(|(_, a), (_, b)| {
4506 let cmp = a
4507 .range
4508 .start
4509 .cmp(&b.range.start, self)
4510 // when range is equal, sort by diagnostic severity
4511 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4512 // and stabilize order with group_id
4513 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4514 if reversed { cmp.reverse() } else { cmp }
4515 })?;
4516 iterators[next_ix]
4517 .next()
4518 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4519 diagnostic,
4520 range: FromAnchor::from_anchor(&range.start, self)
4521 ..FromAnchor::from_anchor(&range.end, self),
4522 })
4523 })
4524 }
4525
4526 /// Returns all the diagnostic groups associated with the given
4527 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4529 pub fn diagnostic_groups(
4530 &self,
4531 language_server_id: Option<LanguageServerId>,
4532 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4533 let mut groups = Vec::new();
4534
4535 if let Some(language_server_id) = language_server_id {
4536 if let Ok(ix) = self
4537 .diagnostics
4538 .binary_search_by_key(&language_server_id, |e| e.0)
4539 {
4540 self.diagnostics[ix]
4541 .1
4542 .groups(language_server_id, &mut groups, self);
4543 }
4544 } else {
4545 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4546 diagnostics.groups(*language_server_id, &mut groups, self);
4547 }
4548 }
4549
4550 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4551 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4552 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4553 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4554 });
4555
4556 groups
4557 }
4558
4559 /// Returns an iterator over the diagnostics for the given group.
4560 pub fn diagnostic_group<O>(
4561 &self,
4562 group_id: usize,
4563 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4564 where
4565 O: FromAnchor + 'static,
4566 {
4567 self.diagnostics
4568 .iter()
4569 .flat_map(move |(_, set)| set.group(group_id, self))
4570 }
4571
4572 /// An integer version number that accounts for all updates besides
4573 /// the buffer's text itself (which is versioned via a version vector).
4574 pub fn non_text_state_update_count(&self) -> usize {
4575 self.non_text_state_update_count
4576 }
4577
4578 /// An integer version that changes when the buffer's syntax changes.
4579 pub fn syntax_update_count(&self) -> usize {
4580 self.syntax.update_count()
4581 }
4582
    /// Returns a snapshot of the underlying file.
4584 pub fn file(&self) -> Option<&Arc<dyn File>> {
4585 self.file.as_ref()
4586 }
4587
4588 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4589 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4590 if let Some(file) = self.file() {
4591 if file.path().file_name().is_none() || include_root {
4592 Some(file.full_path(cx))
4593 } else {
4594 Some(file.path().to_path_buf())
4595 }
4596 } else {
4597 None
4598 }
4599 }
4600
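    /// Returns the distinct words in the given buffer range, mapped to their anchor ranges.
    ///
    /// A minimal sketch (not a doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     // Keep only words containing an `f` followed by an `o` (case-insensitive).
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _range) in words {
    ///     println!("{word}");
    /// }
    /// ```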
4601 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4602 let query_str = query.fuzzy_contents;
4603 if query_str.is_some_and(|query| query.is_empty()) {
4604 return BTreeMap::default();
4605 }
4606
4607 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4608 language,
4609 override_id: None,
4610 }));
4611
4612 let mut query_ix = 0;
4613 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4614 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4615
4616 let mut words = BTreeMap::default();
4617 let mut current_word_start_ix = None;
4618 let mut chunk_ix = query.range.start;
4619 for chunk in self.chunks(query.range, false) {
4620 for (i, c) in chunk.text.char_indices() {
4621 let ix = chunk_ix + i;
4622 if classifier.is_word(c) {
4623 if current_word_start_ix.is_none() {
4624 current_word_start_ix = Some(ix);
4625 }
4626
4627 if let Some(query_chars) = &query_chars
4628 && query_ix < query_len
4629 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4630 {
4631 query_ix += 1;
4632 }
4633 continue;
4634 } else if let Some(word_start) = current_word_start_ix.take()
4635 && query_ix == query_len
4636 {
4637 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4638 let mut word_text = self.text_for_range(word_start..ix).peekable();
4639 let first_char = word_text
4640 .peek()
4641 .and_then(|first_chunk| first_chunk.chars().next());
4642 // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
4643 if !query.skip_digits
4644 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4645 {
4646 words.insert(word_text.collect(), word_range);
4647 }
4648 }
4649 query_ix = 0;
4650 }
4651 chunk_ix += chunk.text.len();
4652 }
4653
4654 words
4655 }
4656}
4657
4658pub struct WordsQuery<'a> {
    /// When set, only words containing every character of this fuzzy query, in order and
    /// case-insensitively, are returned.
4660 pub fuzzy_contents: Option<&'a str>,
4661 /// Skips words that start with a digit.
4662 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4664 pub range: Range<usize>,
4665}
4666
4667fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4668 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4669}
4670
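/// Returns the indentation at the start of the given character stream; the indent kind is
/// taken from the first indent character. For example, `"\t\tfoo"` yields a two-character
/// tab indent.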
4671fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4672 let mut result = IndentSize::spaces(0);
4673 for c in text {
4674 let kind = match c {
4675 ' ' => IndentKind::Space,
4676 '\t' => IndentKind::Tab,
4677 _ => break,
4678 };
4679 if result.len == 0 {
4680 result.kind = kind;
4681 }
4682 result.len += 1;
4683 }
4684 result
4685}
4686
4687impl Clone for BufferSnapshot {
4688 fn clone(&self) -> Self {
4689 Self {
4690 text: self.text.clone(),
4691 syntax: self.syntax.clone(),
4692 file: self.file.clone(),
4693 remote_selections: self.remote_selections.clone(),
4694 diagnostics: self.diagnostics.clone(),
4695 language: self.language.clone(),
4696 non_text_state_update_count: self.non_text_state_update_count,
4697 }
4698 }
4699}
4700
4701impl Deref for BufferSnapshot {
4702 type Target = text::BufferSnapshot;
4703
4704 fn deref(&self) -> &Self::Target {
4705 &self.text
4706 }
4707}
4708
4709unsafe impl Send for BufferChunks<'_> {}
4710
4711impl<'a> BufferChunks<'a> {
4712 pub(crate) fn new(
4713 text: &'a Rope,
4714 range: Range<usize>,
4715 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4716 diagnostics: bool,
4717 buffer_snapshot: Option<&'a BufferSnapshot>,
4718 ) -> Self {
4719 let mut highlights = None;
4720 if let Some((captures, highlight_maps)) = syntax {
4721 highlights = Some(BufferChunkHighlights {
4722 captures,
4723 next_capture: None,
4724 stack: Default::default(),
4725 highlight_maps,
4726 })
4727 }
4728
4729 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4730 let chunks = text.chunks_in_range(range.clone());
4731
4732 let mut this = BufferChunks {
4733 range,
4734 buffer_snapshot,
4735 chunks,
4736 diagnostic_endpoints,
4737 error_depth: 0,
4738 warning_depth: 0,
4739 information_depth: 0,
4740 hint_depth: 0,
4741 unnecessary_depth: 0,
4742 underline: true,
4743 highlights,
4744 };
4745 this.initialize_diagnostic_endpoints();
4746 this
4747 }
4748
    /// Seeks this iterator to the given byte range within the buffer.
4750 pub fn seek(&mut self, range: Range<usize>) {
4751 let old_range = std::mem::replace(&mut self.range, range.clone());
4752 self.chunks.set_range(self.range.clone());
4753 if let Some(highlights) = self.highlights.as_mut() {
4754 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4755 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4756 highlights
4757 .stack
4758 .retain(|(end_offset, _)| *end_offset > range.start);
4759 if let Some(capture) = &highlights.next_capture
4760 && range.start >= capture.node.start_byte()
4761 {
4762 let next_capture_end = capture.node.end_byte();
4763 if range.start < next_capture_end {
4764 highlights.stack.push((
4765 next_capture_end,
4766 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4767 ));
4768 }
4769 highlights.next_capture.take();
4770 }
4771 } else if let Some(snapshot) = self.buffer_snapshot {
4772 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4773 *highlights = BufferChunkHighlights {
4774 captures,
4775 next_capture: None,
4776 stack: Default::default(),
4777 highlight_maps,
4778 };
4779 } else {
4780 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4781 // Seeking such BufferChunks is not supported.
4782 debug_assert!(
4783 false,
4784 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4785 );
4786 }
4787
4788 highlights.captures.set_byte_range(self.range.clone());
4789 self.initialize_diagnostic_endpoints();
4790 }
4791 }
4792
4793 fn initialize_diagnostic_endpoints(&mut self) {
4794 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4795 && let Some(buffer) = self.buffer_snapshot
4796 {
4797 let mut diagnostic_endpoints = Vec::new();
4798 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4799 diagnostic_endpoints.push(DiagnosticEndpoint {
4800 offset: entry.range.start,
4801 is_start: true,
4802 severity: entry.diagnostic.severity,
4803 is_unnecessary: entry.diagnostic.is_unnecessary,
4804 underline: entry.diagnostic.underline,
4805 });
4806 diagnostic_endpoints.push(DiagnosticEndpoint {
4807 offset: entry.range.end,
4808 is_start: false,
4809 severity: entry.diagnostic.severity,
4810 is_unnecessary: entry.diagnostic.is_unnecessary,
4811 underline: entry.diagnostic.underline,
4812 });
4813 }
4814 diagnostic_endpoints
4815 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4816 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4817 self.hint_depth = 0;
4818 self.error_depth = 0;
4819 self.warning_depth = 0;
4820 self.information_depth = 0;
4821 }
4822 }
4823
4824 /// The current byte offset in the buffer.
4825 pub fn offset(&self) -> usize {
4826 self.range.start
4827 }
4828
4829 pub fn range(&self) -> Range<usize> {
4830 self.range.clone()
4831 }
4832
4833 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4834 let depth = match endpoint.severity {
4835 DiagnosticSeverity::ERROR => &mut self.error_depth,
4836 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4837 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4838 DiagnosticSeverity::HINT => &mut self.hint_depth,
4839 _ => return,
4840 };
4841 if endpoint.is_start {
4842 *depth += 1;
4843 } else {
4844 *depth -= 1;
4845 }
4846
4847 if endpoint.is_unnecessary {
4848 if endpoint.is_start {
4849 self.unnecessary_depth += 1;
4850 } else {
4851 self.unnecessary_depth -= 1;
4852 }
4853 }
4854 }
4855
4856 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4857 if self.error_depth > 0 {
4858 Some(DiagnosticSeverity::ERROR)
4859 } else if self.warning_depth > 0 {
4860 Some(DiagnosticSeverity::WARNING)
4861 } else if self.information_depth > 0 {
4862 Some(DiagnosticSeverity::INFORMATION)
4863 } else if self.hint_depth > 0 {
4864 Some(DiagnosticSeverity::HINT)
4865 } else {
4866 None
4867 }
4868 }
4869
4870 fn current_code_is_unnecessary(&self) -> bool {
4871 self.unnecessary_depth > 0
4872 }
4873}
4874
4875impl<'a> Iterator for BufferChunks<'a> {
4876 type Item = Chunk<'a>;
4877
4878 fn next(&mut self) -> Option<Self::Item> {
4879 let mut next_capture_start = usize::MAX;
4880 let mut next_diagnostic_endpoint = usize::MAX;
4881
4882 if let Some(highlights) = self.highlights.as_mut() {
4883 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4884 if *parent_capture_end <= self.range.start {
4885 highlights.stack.pop();
4886 } else {
4887 break;
4888 }
4889 }
4890
4891 if highlights.next_capture.is_none() {
4892 highlights.next_capture = highlights.captures.next();
4893 }
4894
4895 while let Some(capture) = highlights.next_capture.as_ref() {
4896 if self.range.start < capture.node.start_byte() {
4897 next_capture_start = capture.node.start_byte();
4898 break;
4899 } else {
4900 let highlight_id =
4901 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4902 highlights
4903 .stack
4904 .push((capture.node.end_byte(), highlight_id));
4905 highlights.next_capture = highlights.captures.next();
4906 }
4907 }
4908 }
4909
4910 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4911 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4912 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4913 if endpoint.offset <= self.range.start {
4914 self.update_diagnostic_depths(endpoint);
4915 diagnostic_endpoints.next();
4916 self.underline = endpoint.underline;
4917 } else {
4918 next_diagnostic_endpoint = endpoint.offset;
4919 break;
4920 }
4921 }
4922 }
4923 self.diagnostic_endpoints = diagnostic_endpoints;
4924
4925 if let Some(chunk) = self.chunks.peek() {
4926 let chunk_start = self.range.start;
4927 let mut chunk_end = (self.chunks.offset() + chunk.len())
4928 .min(next_capture_start)
4929 .min(next_diagnostic_endpoint);
4930 let mut highlight_id = None;
4931 if let Some(highlights) = self.highlights.as_ref()
4932 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4933 {
4934 chunk_end = chunk_end.min(*parent_capture_end);
4935 highlight_id = Some(*parent_highlight_id);
4936 }
4937
4938 let slice =
4939 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4940 self.range.start = chunk_end;
4941 if self.range.start == self.chunks.offset() + chunk.len() {
4942 self.chunks.next().unwrap();
4943 }
4944
4945 Some(Chunk {
4946 text: slice,
4947 syntax_highlight_id: highlight_id,
4948 underline: self.underline,
4949 diagnostic_severity: self.current_diagnostic_severity(),
4950 is_unnecessary: self.current_code_is_unnecessary(),
4951 ..Chunk::default()
4952 })
4953 } else {
4954 None
4955 }
4956 }
4957}
4958
4959impl operation_queue::Operation for Operation {
4960 fn lamport_timestamp(&self) -> clock::Lamport {
4961 match self {
4962 Operation::Buffer(_) => {
4963 unreachable!("buffer operations should never be deferred at this layer")
4964 }
4965 Operation::UpdateDiagnostics {
4966 lamport_timestamp, ..
4967 }
4968 | Operation::UpdateSelections {
4969 lamport_timestamp, ..
4970 }
4971 | Operation::UpdateCompletionTriggers {
4972 lamport_timestamp, ..
4973 }
4974 | Operation::UpdateLineEnding {
4975 lamport_timestamp, ..
4976 } => *lamport_timestamp,
4977 }
4978 }
4979}
4980
4981impl Default for Diagnostic {
4982 fn default() -> Self {
4983 Self {
4984 source: Default::default(),
4985 source_kind: DiagnosticSourceKind::Other,
4986 code: None,
4987 code_description: None,
4988 severity: DiagnosticSeverity::ERROR,
4989 message: Default::default(),
4990 markdown: None,
4991 group_id: 0,
4992 is_primary: false,
4993 is_disk_based: false,
4994 is_unnecessary: false,
4995 underline: true,
4996 data: None,
4997 }
4998 }
4999}
5000
5001impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5003 pub fn spaces(len: u32) -> Self {
5004 Self {
5005 len,
5006 kind: IndentKind::Space,
5007 }
5008 }
5009
5010 /// Returns an [`IndentSize`] representing a tab.
5011 pub fn tab() -> Self {
5012 Self {
5013 len: 1,
5014 kind: IndentKind::Tab,
5015 }
5016 }
5017
5018 /// An iterator over the characters represented by this [`IndentSize`].
5019 pub fn chars(&self) -> impl Iterator<Item = char> {
5020 iter::repeat(self.char()).take(self.len as usize)
5021 }
5022
5023 /// The character representation of this [`IndentSize`].
5024 pub fn char(&self) -> char {
5025 match self.kind {
5026 IndentKind::Space => ' ',
5027 IndentKind::Tab => '\t',
5028 }
5029 }
5030
5031 /// Consumes the current [`IndentSize`] and returns a new one that has
5032 /// been shrunk or enlarged by the given size along the given direction.
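    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2))`
    /// yields an indent of two spaces, while shrinking by an indent of a different kind
    /// leaves the size unchanged.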
5033 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5034 match direction {
5035 Ordering::Less => {
5036 if self.kind == size.kind && self.len >= size.len {
5037 self.len -= size.len;
5038 }
5039 }
5040 Ordering::Equal => {}
5041 Ordering::Greater => {
5042 if self.len == 0 {
5043 self = size;
5044 } else if self.kind == size.kind {
5045 self.len += size.len;
5046 }
5047 }
5048 }
5049 self
5050 }
5051
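    /// Returns the width of this indent in columns, expanding each tab to `tab_size` columns.
    /// For example, an indent of two tabs with a tab size of 4 is 8 columns wide.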
5052 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5053 match self.kind {
5054 IndentKind::Space => self.len as usize,
5055 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5056 }
5057 }
5058}
5059
5060#[cfg(any(test, feature = "test-support"))]
5061pub struct TestFile {
5062 pub path: Arc<Path>,
5063 pub root_name: String,
5064 pub local_root: Option<PathBuf>,
5065}
5066
5067#[cfg(any(test, feature = "test-support"))]
5068impl File for TestFile {
5069 fn path(&self) -> &Arc<Path> {
5070 &self.path
5071 }
5072
5073 fn full_path(&self, _: &gpui::App) -> PathBuf {
5074 PathBuf::from(&self.root_name).join(self.path.as_ref())
5075 }
5076
5077 fn as_local(&self) -> Option<&dyn LocalFile> {
5078 if self.local_root.is_some() {
5079 Some(self)
5080 } else {
5081 None
5082 }
5083 }
5084
5085 fn disk_state(&self) -> DiskState {
5086 unimplemented!()
5087 }
5088
5089 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
5090 self.path().file_name().unwrap_or(self.root_name.as_ref())
5091 }
5092
5093 fn worktree_id(&self, _: &App) -> WorktreeId {
5094 WorktreeId::from_usize(0)
5095 }
5096
5097 fn to_proto(&self, _: &App) -> rpc::proto::File {
5098 unimplemented!()
5099 }
5100
5101 fn is_private(&self) -> bool {
5102 false
5103 }
5104}
5105
5106#[cfg(any(test, feature = "test-support"))]
5107impl LocalFile for TestFile {
5108 fn abs_path(&self, _cx: &App) -> PathBuf {
5109 PathBuf::from(self.local_root.as_ref().unwrap())
5110 .join(&self.root_name)
5111 .join(self.path.as_ref())
5112 }
5113
5114 fn load(&self, _cx: &App) -> Task<Result<String>> {
5115 unimplemented!()
5116 }
5117
5118 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5119 unimplemented!()
5120 }
5121}
5122
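/// Groups consecutive values from the iterator into contiguous ranges, capping each range at
/// `max_len` elements. For example, `[1, 2, 3, 5, 6, 9]` with `max_len = 2` yields `1..3`,
/// `3..4`, `5..7`, and `9..10`.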
5123pub(crate) fn contiguous_ranges(
5124 values: impl Iterator<Item = u32>,
5125 max_len: usize,
5126) -> impl Iterator<Item = Range<u32>> {
5127 let mut values = values;
5128 let mut current_range: Option<Range<u32>> = None;
5129 std::iter::from_fn(move || {
5130 loop {
5131 if let Some(value) = values.next() {
5132 if let Some(range) = &mut current_range
5133 && value == range.end
5134 && range.len() < max_len
5135 {
5136 range.end += 1;
5137 continue;
5138 }
5139
5140 let prev_range = current_range.clone();
5141 current_range = Some(value..(value + 1));
5142 if prev_range.is_some() {
5143 return prev_range;
5144 }
5145 } else {
5146 return current_range.take();
5147 }
5148 }
5149 })
5150}
5151
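/// Classifies characters as words, whitespace, or punctuation, optionally taking a language
/// scope's word-character configuration into account.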
5152#[derive(Default, Debug)]
5153pub struct CharClassifier {
5154 scope: Option<LanguageScope>,
5155 for_completion: bool,
5156 ignore_punctuation: bool,
5157}
5158
5159impl CharClassifier {
5160 pub fn new(scope: Option<LanguageScope>) -> Self {
5161 Self {
5162 scope,
5163 for_completion: false,
5164 ignore_punctuation: false,
5165 }
5166 }
5167
5168 pub fn for_completion(self, for_completion: bool) -> Self {
5169 Self {
5170 for_completion,
5171 ..self
5172 }
5173 }
5174
5175 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5176 Self {
5177 ignore_punctuation,
5178 ..self
5179 }
5180 }
5181
5182 pub fn is_whitespace(&self, c: char) -> bool {
5183 self.kind(c) == CharKind::Whitespace
5184 }
5185
5186 pub fn is_word(&self, c: char) -> bool {
5187 self.kind(c) == CharKind::Word
5188 }
5189
5190 pub fn is_punctuation(&self, c: char) -> bool {
5191 self.kind(c) == CharKind::Punctuation
5192 }
5193
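    /// Classifies a character, optionally treating punctuation as part of a word.
    ///
    /// Alphanumeric characters and `_` are always words, and a language scope may declare
    /// additional word characters (or completion-query characters when `for_completion` is
    /// set). For example, `CharClassifier::new(None).kind('x')` is `CharKind::Word` and
    /// `kind('.')` is `CharKind::Punctuation`.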
5194 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5195 if c.is_alphanumeric() || c == '_' {
5196 return CharKind::Word;
5197 }
5198
5199 if let Some(scope) = &self.scope {
5200 let characters = if self.for_completion {
5201 scope.completion_query_characters()
5202 } else {
5203 scope.word_characters()
5204 };
5205 if let Some(characters) = characters
5206 && characters.contains(&c)
5207 {
5208 return CharKind::Word;
5209 }
5210 }
5211
5212 if c.is_whitespace() {
5213 return CharKind::Whitespace;
5214 }
5215
5216 if ignore_punctuation {
5217 CharKind::Word
5218 } else {
5219 CharKind::Punctuation
5220 }
5221 }
5222
5223 pub fn kind(&self, c: char) -> CharKind {
5224 self.kind_with(c, self.ignore_punctuation)
5225 }
5226}
5227
5228/// Find all of the ranges of whitespace that occur at the ends of lines
5229/// in the given rope.
5230///
5231/// This could also be done with a regex search, but this implementation
5232/// avoids copying text.
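///
/// For example, for the rope `"ab  \ncd\t\n"` (two trailing spaces on the first line and a
/// trailing tab on the second), this returns the ranges `2..4` and `7..8`.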
5233pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5234 let mut ranges = Vec::new();
5235
5236 let mut offset = 0;
5237 let mut prev_chunk_trailing_whitespace_range = 0..0;
5238 for chunk in rope.chunks() {
5239 let mut prev_line_trailing_whitespace_range = 0..0;
5240 for (i, line) in chunk.split('\n').enumerate() {
5241 let line_end_offset = offset + line.len();
5242 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5243 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5244
5245 if i == 0 && trimmed_line_len == 0 {
5246 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5247 }
5248 if !prev_line_trailing_whitespace_range.is_empty() {
5249 ranges.push(prev_line_trailing_whitespace_range);
5250 }
5251
5252 offset = line_end_offset + 1;
5253 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5254 }
5255
5256 offset -= 1;
5257 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5258 }
5259
5260 if !prev_chunk_trailing_whitespace_range.is_empty() {
5261 ranges.push(prev_chunk_trailing_whitespace_range);
5262 }
5263
5264 ranges
5265}