1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell stores `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: &'static Encoding,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
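
// Illustrative sketch (test-only, not part of the buffer API): how leading whitespace
// maps onto `IndentSize`. `len` counts bytes, and the whole run is assumed to use a
// single kind of whitespace character. The helper name is purely illustrative.
#[cfg(test)]
fn _example_indent_sizes() {
    // "    let x = 1;" starts with four spaces.
    let spaces = IndentSize {
        len: 4,
        kind: IndentKind::Space,
    };
    // "\tlet x = 1;" starts with one tab.
    let tab = IndentSize {
        len: 1,
        kind: IndentKind::Tab,
    };
    assert_eq!(spaces.len, 4);
    assert_eq!(tab.kind, IndentKind::Tab);
}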
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
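    /// A URL pointing to documentation for this diagnostic's code, if the language
    /// server provided one.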
216 pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
    /// The human-readable message in Markdown format, if available.
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
    /// A coarse grouping of diagnostics based on the kind of source that produced them.
244 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back
    /// to the server when code actions are requested for this diagnostic.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
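
// Illustrative sketch (test-only): a diagnostic group as described above. The primary
// diagnostic and its associated diagnostic share a `group_id`, and only the top-level
// entry sets `is_primary`. This assumes the `Default` impl for `Diagnostic` provided
// elsewhere in this crate; the messages and severities are arbitrary.
#[cfg(test)]
fn _example_diagnostic_group() -> Vec<Diagnostic> {
    let primary = Diagnostic {
        message: "mismatched types".to_string(),
        severity: DiagnosticSeverity::ERROR,
        group_id: 1,
        is_primary: true,
        ..Default::default()
    };
    let related = Diagnostic {
        message: "expected due to this".to_string(),
        severity: DiagnosticSeverity::HINT,
        group_id: 1,
        is_primary: false,
        ..Default::default()
    };
    vec![primary, related]
}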
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
    /// The buffer needs to be reloaded.
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
    /// Returns the file's storage state: whether it is new, present on disk, or has been
    /// deleted. Includes metadata that is only available in some states, such as the
    /// modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
    /// Returns whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376}
377
378/// The file's storage status - whether it's stored (`Present`), and if so when it was last
379/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
380/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
381/// indicator for new files.
382#[derive(Copy, Clone, Debug, PartialEq)]
383pub enum DiskState {
384 /// File created in Zed that has not been saved.
385 New,
386 /// File present on the filesystem.
387 Present { mtime: MTime },
388 /// Deleted file that was previously present.
389 Deleted,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
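
// Illustrative sketch (test-only): how the three `DiskState` variants answer the two
// questions callers usually ask, via `exists` and `mtime`. The helper is hypothetical.
#[cfg(test)]
fn _example_disk_state_queries(state: DiskState) -> (bool, Option<MTime>) {
    // A `New` buffer has never been written, `Present` carries its mtime, and a
    // `Deleted` file no longer exists on disk.
    (state.exists(), state.mtime())
}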
410
411/// The file associated with a buffer, in the case where the file is on the local disk.
412pub trait LocalFile: File {
    /// Returns the absolute path of this file.
414 fn abs_path(&self, cx: &App) -> PathBuf;
415
416 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
417 fn load(&self, cx: &App) -> Task<Result<String>>;
418
419 /// Loads the file's contents from disk.
420 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
421
422 /// Loads the file contents from disk, decoding them with the given encoding.
423 fn load_with_encoding(&self, cx: &App, encoding: &'static Encoding)
424 -> Task<Result<String>>;
425}
426
427/// The auto-indent behavior associated with an editing operation.
428/// For some editing operations, each affected line of text has its
429/// indentation recomputed. For other operations, the entire block
430/// of edited text is adjusted uniformly.
431#[derive(Clone, Debug)]
432pub enum AutoindentMode {
433 /// Indent each line of inserted text.
434 EachLine,
435 /// Apply the same indentation adjustment to all of the lines
436 /// in a given insertion.
437 Block {
438 /// The original indentation column of the first line of each
439 /// insertion, if it has been copied.
440 ///
441 /// Knowing this makes it possible to preserve the relative indentation
442 /// of every line in the insertion from when it was copied.
443 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by the same amount, `b - a`.
447 original_indent_columns: Vec<Option<u32>>,
448 },
449}
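
// Illustrative sketch (test-only): the block-mode arithmetic described above. If the
// copied text's first line originally sat at column `a = 4` and auto-indent places the
// inserted first line at column `b = 8`, every line of the insertion is shifted by
// `b - a = 4` columns, preserving the block's internal relative indentation. The
// numbers are arbitrary.
#[cfg(test)]
fn _example_block_indent_delta() {
    let original_indent_column = 4u32; // `a`
    let first_line_auto_indent = 8u32; // `b`
    let delta = first_line_auto_indent - original_indent_column;
    // A continuation line that was at column 6 in the copied text ends up at column 10.
    assert_eq!(6 + delta, 10);
}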
450
451#[derive(Clone)]
452struct AutoindentRequest {
453 before_edit: BufferSnapshot,
454 entries: Vec<AutoindentRequestEntry>,
455 is_block_mode: bool,
456 ignore_empty_lines: bool,
457}
458
459#[derive(Debug, Clone)]
460struct AutoindentRequestEntry {
461 /// A range of the buffer whose indentation should be adjusted.
462 range: Range<Anchor>,
463 /// Whether or not these lines should be considered brand new, for the
464 /// purpose of auto-indent. When text is not new, its indentation will
465 /// only be adjusted if the suggested indentation level has *changed*
466 /// since the edit was made.
467 first_line_is_new: bool,
468 indent_size: IndentSize,
469 original_indent_column: Option<u32>,
470}
471
472#[derive(Debug)]
473struct IndentSuggestion {
474 basis_row: u32,
475 delta: Ordering,
476 within_error: bool,
477}
478
479struct BufferChunkHighlights<'a> {
480 captures: SyntaxMapCaptures<'a>,
481 next_capture: Option<SyntaxMapCapture<'a>>,
482 stack: Vec<(usize, HighlightId)>,
483 highlight_maps: Vec<HighlightMap>,
484}
485
486/// An iterator that yields chunks of a buffer's text, along with their
487/// syntax highlights and diagnostic status.
488pub struct BufferChunks<'a> {
489 buffer_snapshot: Option<&'a BufferSnapshot>,
490 range: Range<usize>,
491 chunks: text::Chunks<'a>,
492 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
493 error_depth: usize,
494 warning_depth: usize,
495 information_depth: usize,
496 hint_depth: usize,
497 unnecessary_depth: usize,
498 underline: bool,
499 highlights: Option<BufferChunkHighlights<'a>>,
500}
501
502/// A chunk of a buffer's text, along with its syntax highlight and
503/// diagnostic status.
504#[derive(Clone, Debug, Default)]
505pub struct Chunk<'a> {
506 /// The text of the chunk.
507 pub text: &'a str,
508 /// The syntax highlighting style of the chunk.
509 pub syntax_highlight_id: Option<HighlightId>,
510 /// The highlight style that has been applied to this chunk in
511 /// the editor.
512 pub highlight_style: Option<HighlightStyle>,
513 /// The severity of diagnostic associated with this chunk, if any.
514 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
519 /// Whether this chunk of text is marked as unnecessary.
520 pub is_unnecessary: bool,
521 /// Whether this chunk of text was originally a tab character.
522 pub is_tab: bool,
523 /// Whether this chunk of text was originally an inlay.
524 pub is_inlay: bool,
525 /// Whether to underline the corresponding text range in the editor.
526 pub underline: bool,
527}
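
// Illustrative sketch (test-only): how a per-chunk bitset like `tabs` encodes positions.
// Assuming the bit for index `i` corresponds to the i-th character of the chunk, the
// text "\tfoo\tbar" sets bits 0 and 4. The loop below only demonstrates that encoding.
#[cfg(test)]
fn _example_chunk_tab_bitset() {
    let text = "\tfoo\tbar";
    let mut tabs = 0u128;
    for (ix, ch) in text.chars().enumerate() {
        if ch == '\t' {
            tabs |= 1 << ix;
        }
    }
    assert_eq!(tabs, 0b1_0001);
}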
528
529/// A set of edits to a given version of a buffer, computed asynchronously.
530#[derive(Debug)]
531pub struct Diff {
532 pub base_version: clock::Global,
533 pub line_ending: LineEnding,
534 pub edits: Vec<(Range<usize>, Arc<str>)>,
535}
536
537#[derive(Debug, Clone, Copy)]
538pub(crate) struct DiagnosticEndpoint {
539 offset: usize,
540 is_start: bool,
541 underline: bool,
542 severity: DiagnosticSeverity,
543 is_unnecessary: bool,
544}
545
546/// A class of characters, used for characterizing a run of text.
547#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
548pub enum CharKind {
549 /// Whitespace.
550 Whitespace,
551 /// Punctuation.
552 Punctuation,
553 /// Word.
554 Word,
555}
556
557/// Context for character classification within a specific scope.
558#[derive(Copy, Clone, Eq, PartialEq, Debug)]
559pub enum CharScopeContext {
560 /// Character classification for completion queries.
561 ///
562 /// This context treats certain characters as word constituents that would
563 /// normally be considered punctuation, such as '-' in Tailwind classes
564 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
565 Completion,
566 /// Character classification for linked edits.
567 ///
568 /// This context handles characters that should be treated as part of
569 /// identifiers during linked editing operations, such as '.' in JSX
570 /// component names like `<Animated.View>`.
571 LinkedEdit,
572}
573
/// A runnable is a set of data about a buffer region that can be resolved into a task.
575pub struct Runnable {
576 pub tags: SmallVec<[RunnableTag; 1]>,
577 pub language: Arc<Language>,
578 pub buffer: BufferId,
579}
580
581#[derive(Default, Clone, Debug)]
582pub struct HighlightedText {
583 pub text: SharedString,
584 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587#[derive(Default, Debug)]
588struct HighlightedTextBuilder {
589 pub text: String,
590 highlights: Vec<(Range<usize>, HighlightStyle)>,
591}
592
593impl HighlightedText {
594 pub fn from_buffer_range<T: ToOffset>(
595 range: Range<T>,
596 snapshot: &text::BufferSnapshot,
597 syntax_snapshot: &SyntaxSnapshot,
598 override_style: Option<HighlightStyle>,
599 syntax_theme: &SyntaxTheme,
600 ) -> Self {
601 let mut highlighted_text = HighlightedTextBuilder::default();
602 highlighted_text.add_text_from_buffer_range(
603 range,
604 snapshot,
605 syntax_snapshot,
606 override_style,
607 syntax_theme,
608 );
609 highlighted_text.build()
610 }
611
612 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
613 gpui::StyledText::new(self.text.clone())
614 .with_default_highlights(default_style, self.highlights.iter().cloned())
615 }
616
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether any further lines follow.
619 pub fn first_line_preview(self) -> (Self, bool) {
620 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
621 let first_line = &self.text[..newline_ix];
622
623 // Trim leading whitespace, unless an edit starts prior to it.
624 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
625 if let Some((first_highlight_range, _)) = self.highlights.first() {
626 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
627 }
628
629 let preview_text = &first_line[preview_start_ix..];
630 let preview_highlights = self
631 .highlights
632 .into_iter()
633 .skip_while(|(range, _)| range.end <= preview_start_ix)
634 .take_while(|(range, _)| range.start < newline_ix)
635 .filter_map(|(mut range, highlight)| {
636 range.start = range.start.saturating_sub(preview_start_ix);
637 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
638 if range.is_empty() {
639 None
640 } else {
641 Some((range, highlight))
642 }
643 });
644
645 let preview = Self {
646 text: SharedString::new(preview_text),
647 highlights: preview_highlights.collect(),
648 };
649
650 (preview, self.text.len() > newline_ix)
651 }
652}
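
// Illustrative sketch (test-only): `first_line_preview` keeps only the first line,
// trims leading whitespace (unless a highlight begins inside it), and reports whether
// further lines were dropped. The sample text is arbitrary.
#[cfg(test)]
fn _example_first_line_preview() {
    let highlighted = HighlightedText {
        text: SharedString::new("    let x = 1;\nlet y = 2;"),
        highlights: Vec::new(),
    };
    let (preview, has_more_lines) = highlighted.first_line_preview();
    assert_eq!(&*preview.text, "let x = 1;");
    assert!(has_more_lines);
}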
653
654impl HighlightedTextBuilder {
655 pub fn build(self) -> HighlightedText {
656 HighlightedText {
657 text: self.text.into(),
658 highlights: self.highlights,
659 }
660 }
661
662 pub fn add_text_from_buffer_range<T: ToOffset>(
663 &mut self,
664 range: Range<T>,
665 snapshot: &text::BufferSnapshot,
666 syntax_snapshot: &SyntaxSnapshot,
667 override_style: Option<HighlightStyle>,
668 syntax_theme: &SyntaxTheme,
669 ) {
670 let range = range.to_offset(snapshot);
671 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
672 let start = self.text.len();
673 self.text.push_str(chunk.text);
674 let end = self.text.len();
675
676 if let Some(highlight_style) = chunk
677 .syntax_highlight_id
678 .and_then(|id| id.style(syntax_theme))
679 {
680 let highlight_style = override_style.map_or(highlight_style, |override_style| {
681 highlight_style.highlight(override_style)
682 });
683 self.highlights.push((start..end, highlight_style));
684 } else if let Some(override_style) = override_style {
685 self.highlights.push((start..end, override_style));
686 }
687 }
688 }
689
690 fn highlighted_chunks<'a>(
691 range: Range<usize>,
692 snapshot: &'a text::BufferSnapshot,
693 syntax_snapshot: &'a SyntaxSnapshot,
694 ) -> BufferChunks<'a> {
695 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
696 grammar
697 .highlights_config
698 .as_ref()
699 .map(|config| &config.query)
700 });
701
702 let highlight_maps = captures
703 .grammars()
704 .iter()
705 .map(|grammar| grammar.highlight_map())
706 .collect();
707
708 BufferChunks::new(
709 snapshot.as_rope(),
710 range,
711 Some((captures, highlight_maps)),
712 false,
713 None,
714 )
715 }
716}
717
718#[derive(Clone)]
719pub struct EditPreview {
720 old_snapshot: text::BufferSnapshot,
721 applied_edits_snapshot: text::BufferSnapshot,
722 syntax_snapshot: SyntaxSnapshot,
723}
724
725impl EditPreview {
726 pub fn highlight_edits(
727 &self,
728 current_snapshot: &BufferSnapshot,
729 edits: &[(Range<Anchor>, String)],
730 include_deletions: bool,
731 cx: &App,
732 ) -> HighlightedText {
733 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
734 return HighlightedText::default();
735 };
736
737 let mut highlighted_text = HighlightedTextBuilder::default();
738
739 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
740
741 let insertion_highlight_style = HighlightStyle {
742 background_color: Some(cx.theme().status().created_background),
743 ..Default::default()
744 };
745 let deletion_highlight_style = HighlightStyle {
746 background_color: Some(cx.theme().status().deleted_background),
747 ..Default::default()
748 };
749 let syntax_theme = cx.theme().syntax();
750
751 for (range, edit_text) in edits {
752 let edit_new_end_in_preview_snapshot = range
753 .end
754 .bias_right(&self.old_snapshot)
755 .to_offset(&self.applied_edits_snapshot);
756 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
757
758 let unchanged_range_in_preview_snapshot =
759 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
760 if !unchanged_range_in_preview_snapshot.is_empty() {
761 highlighted_text.add_text_from_buffer_range(
762 unchanged_range_in_preview_snapshot,
763 &self.applied_edits_snapshot,
764 &self.syntax_snapshot,
765 None,
766 syntax_theme,
767 );
768 }
769
770 let range_in_current_snapshot = range.to_offset(current_snapshot);
771 if include_deletions && !range_in_current_snapshot.is_empty() {
772 highlighted_text.add_text_from_buffer_range(
773 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
776 Some(deletion_highlight_style),
777 syntax_theme,
778 );
779 }
780
781 if !edit_text.is_empty() {
782 highlighted_text.add_text_from_buffer_range(
783 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
784 &self.applied_edits_snapshot,
785 &self.syntax_snapshot,
786 Some(insertion_highlight_style),
787 syntax_theme,
788 );
789 }
790
791 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
792 }
793
794 highlighted_text.add_text_from_buffer_range(
795 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
796 &self.applied_edits_snapshot,
797 &self.syntax_snapshot,
798 None,
799 syntax_theme,
800 );
801
802 highlighted_text.build()
803 }
804
805 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
806 let (first, _) = edits.first()?;
807 let (last, _) = edits.last()?;
808
809 let start = first
810 .start
811 .bias_left(&self.old_snapshot)
812 .to_point(&self.applied_edits_snapshot);
813 let end = last
814 .end
815 .bias_right(&self.old_snapshot)
816 .to_point(&self.applied_edits_snapshot);
817
818 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
819 let range = Point::new(start.row, 0)
820 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
821
822 Some(range.to_offset(&self.applied_edits_snapshot))
823 }
824}
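
// Illustrative sketch (test-only): rendering an `EditPreview` produced by
// `Buffer::preview_edits` (defined below). Insertions are highlighted with the theme's
// "created" background and, when `include_deletions` is true, the replaced text is shown
// with the "deleted" background. The helper name is illustrative, not part of the API.
#[cfg(test)]
fn _example_render_edit_preview(
    preview: &EditPreview,
    buffer: &Buffer,
    edits: &[(Range<Anchor>, String)],
    cx: &App,
) -> HighlightedText {
    preview.highlight_edits(&buffer.snapshot(), edits, true, cx)
}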
825
826#[derive(Clone, Debug, PartialEq, Eq)]
827pub struct BracketMatch {
828 pub open_range: Range<usize>,
829 pub close_range: Range<usize>,
830 pub newline_only: bool,
831}
832
833impl Buffer {
834 /// Create a new buffer with the given base text.
835 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
836 Self::build(
837 TextBuffer::new(
838 ReplicaId::LOCAL,
839 cx.entity_id().as_non_zero_u64().into(),
840 base_text.into(),
841 &cx.background_executor(),
842 ),
843 None,
844 Capability::ReadWrite,
845 )
846 }
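
    // Illustrative sketch (test-only): constructing a buffer entity. The entity's id is
    // reused as the buffer id, exactly as `local` does above; the contents are arbitrary
    // and the helper is not part of the buffer API.
    #[cfg(test)]
    fn _example_new_local_buffer(cx: &mut App) -> Entity<Self> {
        cx.new(|cx| Self::local("fn main() {}\n", cx))
    }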
847
848 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
849 pub fn local_normalized(
850 base_text_normalized: Rope,
851 line_ending: LineEnding,
852 cx: &Context<Self>,
853 ) -> Self {
854 Self::build(
855 TextBuffer::new_normalized(
856 ReplicaId::LOCAL,
857 cx.entity_id().as_non_zero_u64().into(),
858 line_ending,
859 base_text_normalized,
860 ),
861 None,
862 Capability::ReadWrite,
863 )
864 }
865
866 /// Create a new buffer that is a replica of a remote buffer.
867 pub fn remote(
868 remote_id: BufferId,
869 replica_id: ReplicaId,
870 capability: Capability,
871 base_text: impl Into<String>,
872 cx: &BackgroundExecutor,
873 ) -> Self {
874 Self::build(
875 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
876 None,
877 capability,
878 )
879 }
880
881 /// Create a new buffer that is a replica of a remote buffer, populating its
882 /// state from the given protobuf message.
883 pub fn from_proto(
884 replica_id: ReplicaId,
885 capability: Capability,
886 message: proto::BufferState,
887 file: Option<Arc<dyn File>>,
888 cx: &BackgroundExecutor,
889 ) -> Result<Self> {
890 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
891 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
892 let mut this = Self::build(buffer, file, capability);
893 this.text.set_line_ending(proto::deserialize_line_ending(
894 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
895 ));
896 this.saved_version = proto::deserialize_version(&message.saved_version);
897 this.saved_mtime = message.saved_mtime.map(|time| time.into());
898 Ok(this)
899 }
900
901 /// Serialize the buffer's state to a protobuf message.
902 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
903 proto::BufferState {
904 id: self.remote_id().into(),
905 file: self.file.as_ref().map(|f| f.to_proto(cx)),
906 base_text: self.base_text().to_string(),
907 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
908 saved_version: proto::serialize_version(&self.saved_version),
909 saved_mtime: self.saved_mtime.map(|time| time.into()),
910 }
911 }
912
913 /// Serialize as protobufs all of the changes to the buffer since the given version.
914 pub fn serialize_ops(
915 &self,
916 since: Option<clock::Global>,
917 cx: &App,
918 ) -> Task<Vec<proto::Operation>> {
919 let mut operations = Vec::new();
920 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
921
922 operations.extend(self.remote_selections.iter().map(|(_, set)| {
923 proto::serialize_operation(&Operation::UpdateSelections {
924 selections: set.selections.clone(),
925 lamport_timestamp: set.lamport_timestamp,
926 line_mode: set.line_mode,
927 cursor_shape: set.cursor_shape,
928 })
929 }));
930
931 for (server_id, diagnostics) in &self.diagnostics {
932 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
933 lamport_timestamp: self.diagnostics_timestamp,
934 server_id: *server_id,
935 diagnostics: diagnostics.iter().cloned().collect(),
936 }));
937 }
938
939 for (server_id, completions) in &self.completion_triggers_per_language_server {
940 operations.push(proto::serialize_operation(
941 &Operation::UpdateCompletionTriggers {
942 triggers: completions.iter().cloned().collect(),
943 lamport_timestamp: self.completion_triggers_timestamp,
944 server_id: *server_id,
945 },
946 ));
947 }
948
949 let text_operations = self.text.operations().clone();
950 cx.background_spawn(async move {
951 let since = since.unwrap_or_default();
952 operations.extend(
953 text_operations
954 .iter()
955 .filter(|(_, op)| !since.observed(op.timestamp()))
956 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
957 );
958 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
959 operations
960 })
961 }
962
963 /// Assign a language to the buffer, returning the buffer.
964 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
965 self.set_language(Some(language), cx);
966 self
967 }
968
969 /// Returns the [`Capability`] of this buffer.
970 pub fn capability(&self) -> Capability {
971 self.capability
972 }
973
974 /// Whether this buffer can only be read.
975 pub fn read_only(&self) -> bool {
976 self.capability == Capability::ReadOnly
977 }
978
979 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
980 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
981 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
982 let snapshot = buffer.snapshot();
983 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
984 Self {
985 saved_mtime,
986 saved_version: buffer.version(),
987 preview_version: buffer.version(),
988 reload_task: None,
989 transaction_depth: 0,
990 was_dirty_before_starting_transaction: None,
991 has_unsaved_edits: Cell::new((buffer.version(), false)),
992 text: buffer,
993 branch_state: None,
994 file,
995 capability,
996 syntax_map,
997 reparse: None,
998 non_text_state_update_count: 0,
999 sync_parse_timeout: Duration::from_millis(1),
1000 parse_status: watch::channel(ParseStatus::Idle),
1001 autoindent_requests: Default::default(),
1002 wait_for_autoindent_txs: Default::default(),
1003 pending_autoindent: Default::default(),
1004 language: None,
1005 remote_selections: Default::default(),
1006 diagnostics: Default::default(),
1007 diagnostics_timestamp: Lamport::MIN,
1008 completion_triggers: Default::default(),
1009 completion_triggers_per_language_server: Default::default(),
1010 completion_triggers_timestamp: Lamport::MIN,
1011 deferred_ops: OperationQueue::new(),
1012 has_conflict: false,
1013 change_bits: Default::default(),
1014 _subscriptions: Vec::new(),
1015 encoding: encoding_rs::UTF_8,
1016 }
1017 }
1018
1019 pub fn build_snapshot(
1020 text: Rope,
1021 language: Option<Arc<Language>>,
1022 language_registry: Option<Arc<LanguageRegistry>>,
1023 cx: &mut App,
1024 ) -> impl Future<Output = BufferSnapshot> + use<> {
1025 let entity_id = cx.reserve_entity::<Self>().entity_id();
1026 let buffer_id = entity_id.as_non_zero_u64().into();
1027 async move {
1028 let text =
1029 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1030 .snapshot();
1031 let mut syntax = SyntaxMap::new(&text).snapshot();
1032 if let Some(language) = language.clone() {
1033 let language_registry = language_registry.clone();
1034 syntax.reparse(&text, language_registry, language);
1035 }
1036 BufferSnapshot {
1037 text,
1038 syntax,
1039 file: None,
1040 diagnostics: Default::default(),
1041 remote_selections: Default::default(),
1042 language,
1043 non_text_state_update_count: 0,
1044 }
1045 }
1046 }
1047
1048 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1049 let entity_id = cx.reserve_entity::<Self>().entity_id();
1050 let buffer_id = entity_id.as_non_zero_u64().into();
1051 let text = TextBuffer::new_normalized(
1052 ReplicaId::LOCAL,
1053 buffer_id,
1054 Default::default(),
1055 Rope::new(),
1056 )
1057 .snapshot();
1058 let syntax = SyntaxMap::new(&text).snapshot();
1059 BufferSnapshot {
1060 text,
1061 syntax,
1062 file: None,
1063 diagnostics: Default::default(),
1064 remote_selections: Default::default(),
1065 language: None,
1066 non_text_state_update_count: 0,
1067 }
1068 }
1069
1070 #[cfg(any(test, feature = "test-support"))]
1071 pub fn build_snapshot_sync(
1072 text: Rope,
1073 language: Option<Arc<Language>>,
1074 language_registry: Option<Arc<LanguageRegistry>>,
1075 cx: &mut App,
1076 ) -> BufferSnapshot {
1077 let entity_id = cx.reserve_entity::<Self>().entity_id();
1078 let buffer_id = entity_id.as_non_zero_u64().into();
1079 let text =
1080 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1081 .snapshot();
1082 let mut syntax = SyntaxMap::new(&text).snapshot();
1083 if let Some(language) = language.clone() {
1084 syntax.reparse(&text, language_registry, language);
1085 }
1086 BufferSnapshot {
1087 text,
1088 syntax,
1089 file: None,
1090 diagnostics: Default::default(),
1091 remote_selections: Default::default(),
1092 language,
1093 non_text_state_update_count: 0,
1094 }
1095 }
1096
1097 /// Retrieve a snapshot of the buffer's current state. This is computationally
1098 /// cheap, and allows reading from the buffer on a background thread.
1099 pub fn snapshot(&self) -> BufferSnapshot {
1100 let text = self.text.snapshot();
1101 let mut syntax_map = self.syntax_map.lock();
1102 syntax_map.interpolate(&text);
1103 let syntax = syntax_map.snapshot();
1104
1105 BufferSnapshot {
1106 text,
1107 syntax,
1108 file: self.file.clone(),
1109 remote_selections: self.remote_selections.clone(),
1110 diagnostics: self.diagnostics.clone(),
1111 language: self.language.clone(),
1112 non_text_state_update_count: self.non_text_state_update_count,
1113 }
1114 }
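
    // Illustrative sketch (test-only): because `snapshot` is cheap and the snapshot owns
    // everything it needs, it can be moved onto a background task for read-only work such
    // as measuring the text, while the buffer itself stays on the main thread. The helper
    // is hypothetical.
    #[cfg(test)]
    fn _example_read_snapshot_in_background(&self, cx: &App) -> Task<usize> {
        let snapshot = self.snapshot();
        cx.background_spawn(async move { snapshot.text.len() })
    }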
1115
1116 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1117 let this = cx.entity();
1118 cx.new(|cx| {
1119 let mut branch = Self {
1120 branch_state: Some(BufferBranchState {
1121 base_buffer: this.clone(),
1122 merged_operations: Default::default(),
1123 }),
1124 language: self.language.clone(),
1125 has_conflict: self.has_conflict,
1126 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1127 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1128 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1129 };
1130 if let Some(language_registry) = self.language_registry() {
1131 branch.set_language_registry(language_registry);
1132 }
1133
1134 // Reparse the branch buffer so that we get syntax highlighting immediately.
1135 branch.reparse(cx);
1136
1137 branch
1138 })
1139 }
1140
1141 pub fn preview_edits(
1142 &self,
1143 edits: Arc<[(Range<Anchor>, String)]>,
1144 cx: &App,
1145 ) -> Task<EditPreview> {
1146 let registry = self.language_registry();
1147 let language = self.language().cloned();
1148 let old_snapshot = self.text.snapshot();
1149 let mut branch_buffer = self.text.branch();
1150 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1151 let executor = cx.background_executor().clone();
1152 cx.background_spawn(async move {
1153 if !edits.is_empty() {
1154 if let Some(language) = language.clone() {
1155 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1156 }
1157
1158 branch_buffer.edit(edits.iter().cloned(), &executor);
1159 let snapshot = branch_buffer.snapshot();
1160 syntax_snapshot.interpolate(&snapshot);
1161
1162 if let Some(language) = language {
1163 syntax_snapshot.reparse(&snapshot, registry, language);
1164 }
1165 }
1166 EditPreview {
1167 old_snapshot,
1168 applied_edits_snapshot: branch_buffer.snapshot(),
1169 syntax_snapshot,
1170 }
1171 })
1172 }
1173
1174 /// Applies all of the changes in this buffer that intersect any of the
1175 /// given `ranges` to its base buffer.
1176 ///
1177 /// If `ranges` is empty, then all changes will be applied. This buffer must
1178 /// be a branch buffer to call this method.
1179 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1180 let Some(base_buffer) = self.base_buffer() else {
1181 debug_panic!("not a branch buffer");
1182 return;
1183 };
1184
1185 let mut ranges = if ranges.is_empty() {
1186 &[0..usize::MAX]
1187 } else {
1188 ranges.as_slice()
1189 }
1190 .iter()
1191 .peekable();
1192
1193 let mut edits = Vec::new();
1194 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1195 let mut is_included = false;
1196 while let Some(range) = ranges.peek() {
1197 if range.end < edit.new.start {
1198 ranges.next().unwrap();
1199 } else {
1200 if range.start <= edit.new.end {
1201 is_included = true;
1202 }
1203 break;
1204 }
1205 }
1206
1207 if is_included {
1208 edits.push((
1209 edit.old.clone(),
1210 self.text_for_range(edit.new.clone()).collect::<String>(),
1211 ));
1212 }
1213 }
1214
1215 let operation = base_buffer.update(cx, |base_buffer, cx| {
1216 // cx.emit(BufferEvent::DiffBaseChanged);
1217 base_buffer.edit(edits, None, cx)
1218 });
1219
1220 if let Some(operation) = operation
1221 && let Some(BufferBranchState {
1222 merged_operations, ..
1223 }) = &mut self.branch_state
1224 {
1225 merged_operations.push(operation);
1226 }
1227 }
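
    // Illustrative sketch (test-only): the intended pairing of `branch` and
    // `merge_into_base`. Edits are staged on the branch and then copied back into the
    // base buffer; passing an empty range list merges every change. The edit text here
    // is arbitrary and the helper is not part of the buffer API.
    #[cfg(test)]
    fn _example_branch_and_merge(base: Entity<Self>, cx: &mut App) {
        let branch = base.update(cx, |base, cx| base.branch(cx));
        branch.update(cx, |branch, cx| {
            branch.edit([(0..0, "// staged on the branch\n")], None, cx);
            branch.merge_into_base(Vec::new(), cx);
        });
    }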
1228
1229 fn on_base_buffer_event(
1230 &mut self,
1231 _: Entity<Buffer>,
1232 event: &BufferEvent,
1233 cx: &mut Context<Self>,
1234 ) {
1235 let BufferEvent::Operation { operation, .. } = event else {
1236 return;
1237 };
1238 let Some(BufferBranchState {
1239 merged_operations, ..
1240 }) = &mut self.branch_state
1241 else {
1242 return;
1243 };
1244
1245 let mut operation_to_undo = None;
1246 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1247 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1248 {
1249 merged_operations.remove(ix);
1250 operation_to_undo = Some(operation.timestamp);
1251 }
1252
1253 self.apply_ops([operation.clone()], cx);
1254
1255 if let Some(timestamp) = operation_to_undo {
1256 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1257 self.undo_operations(counts, cx);
1258 }
1259 }
1260
1261 #[cfg(test)]
1262 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1263 &self.text
1264 }
1265
1266 /// Retrieve a snapshot of the buffer's raw text, without any
1267 /// language-related state like the syntax tree or diagnostics.
1268 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1269 self.text.snapshot()
1270 }
1271
1272 /// The file associated with the buffer, if any.
1273 pub fn file(&self) -> Option<&Arc<dyn File>> {
1274 self.file.as_ref()
1275 }
1276
1277 /// The version of the buffer that was last saved or reloaded from disk.
1278 pub fn saved_version(&self) -> &clock::Global {
1279 &self.saved_version
1280 }
1281
1282 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1283 pub fn saved_mtime(&self) -> Option<MTime> {
1284 self.saved_mtime
1285 }
1286
1287 /// Assign a language to the buffer.
1288 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1289 self.non_text_state_update_count += 1;
1290 self.syntax_map.lock().clear(&self.text);
1291 self.language = language;
1292 self.was_changed();
1293 self.reparse(cx);
1294 cx.emit(BufferEvent::LanguageChanged);
1295 }
1296
1297 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1298 /// other languages if parts of the buffer are written in different languages.
1299 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1300 self.syntax_map
1301 .lock()
1302 .set_language_registry(language_registry);
1303 }
1304
1305 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1306 self.syntax_map.lock().language_registry()
1307 }
1308
1309 /// Assign the line ending type to the buffer.
1310 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1311 self.text.set_line_ending(line_ending);
1312
1313 let lamport_timestamp = self.text.lamport_clock.tick();
1314 self.send_operation(
1315 Operation::UpdateLineEnding {
1316 line_ending,
1317 lamport_timestamp,
1318 },
1319 true,
1320 cx,
1321 );
1322 }
1323
1324 /// Assign the buffer a new [`Capability`].
1325 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1326 if self.capability != capability {
1327 self.capability = capability;
1328 cx.emit(BufferEvent::CapabilityChanged)
1329 }
1330 }
1331
1332 /// This method is called to signal that the buffer has been saved.
1333 pub fn did_save(
1334 &mut self,
1335 version: clock::Global,
1336 mtime: Option<MTime>,
1337 cx: &mut Context<Self>,
1338 ) {
1339 self.saved_version = version.clone();
1340 self.has_unsaved_edits.set((version, false));
1341 self.has_conflict = false;
1342 self.saved_mtime = mtime;
1343 self.was_changed();
1344 cx.emit(BufferEvent::Saved);
1345 cx.notify();
1346 }
1347
1348 /// Reloads the contents of the buffer from disk.
1349 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1350 let (tx, rx) = futures::channel::oneshot::channel();
1351 let encoding = self.encoding;
1352 let prev_version = self.text.version();
1353 self.reload_task = Some(cx.spawn(async move |this, cx| {
1354 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1355 let file = this.file.as_ref()?.as_local()?;
1356 Some((
1357 file.disk_state().mtime(),
1358 file.load_with_encoding(cx, encoding),
1359 ))
1360 })?
1361 else {
1362 return Ok(());
1363 };
1364
1365 let new_text = new_text.await?;
1366 let diff = this
1367 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1368 .await;
1369 this.update(cx, |this, cx| {
1370 if this.version() == diff.base_version {
1371 this.finalize_last_transaction();
1372 this.apply_diff(diff, cx);
1373 tx.send(this.finalize_last_transaction().cloned()).ok();
1374 this.has_conflict = false;
1375 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1376 } else {
1377 if !diff.edits.is_empty()
1378 || this
1379 .edits_since::<usize>(&diff.base_version)
1380 .next()
1381 .is_some()
1382 {
1383 this.has_conflict = true;
1384 }
1385
1386 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1387 }
1388
1389 this.reload_task.take();
1390 })
1391 }));
1392 rx
1393 }
1394
1395 /// This method is called to signal that the buffer has been reloaded.
1396 pub fn did_reload(
1397 &mut self,
1398 version: clock::Global,
1399 line_ending: LineEnding,
1400 mtime: Option<MTime>,
1401 cx: &mut Context<Self>,
1402 ) {
1403 self.saved_version = version;
1404 self.has_unsaved_edits
1405 .set((self.saved_version.clone(), false));
1406 self.text.set_line_ending(line_ending);
1407 self.saved_mtime = mtime;
1408 cx.emit(BufferEvent::Reloaded);
1409 cx.notify();
1410 }
1411
1412 /// Updates the [`File`] backing this buffer. This should be called when
1413 /// the file has changed or has been deleted.
1414 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1415 let was_dirty = self.is_dirty();
1416 let mut file_changed = false;
1417
1418 if let Some(old_file) = self.file.as_ref() {
1419 if new_file.path() != old_file.path() {
1420 file_changed = true;
1421 }
1422
1423 let old_state = old_file.disk_state();
1424 let new_state = new_file.disk_state();
1425 if old_state != new_state {
1426 file_changed = true;
1427 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1428 cx.emit(BufferEvent::ReloadNeeded)
1429 }
1430 }
1431 } else {
1432 file_changed = true;
1433 };
1434
1435 self.file = Some(new_file);
1436 if file_changed {
1437 self.was_changed();
1438 self.non_text_state_update_count += 1;
1439 if was_dirty != self.is_dirty() {
1440 cx.emit(BufferEvent::DirtyChanged);
1441 }
1442 cx.emit(BufferEvent::FileHandleChanged);
1443 cx.notify();
1444 }
1445 }
1446
1447 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1448 Some(self.branch_state.as_ref()?.base_buffer.clone())
1449 }
1450
1451 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1452 pub fn language(&self) -> Option<&Arc<Language>> {
1453 self.language.as_ref()
1454 }
1455
1456 /// Returns the [`Language`] at the given location.
1457 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1458 let offset = position.to_offset(self);
1459 let mut is_first = true;
1460 let start_anchor = self.anchor_before(offset);
1461 let end_anchor = self.anchor_after(offset);
1462 self.syntax_map
1463 .lock()
1464 .layers_for_range(offset..offset, &self.text, false)
1465 .filter(|layer| {
1466 if is_first {
1467 is_first = false;
1468 return true;
1469 }
1470
1471 layer
1472 .included_sub_ranges
1473 .map(|sub_ranges| {
1474 sub_ranges.iter().any(|sub_range| {
1475 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1476 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1477 !is_before_start && !is_after_end
1478 })
1479 })
1480 .unwrap_or(true)
1481 })
1482 .last()
1483 .map(|info| info.language.clone())
1484 .or_else(|| self.language.clone())
1485 }
1486
1487 /// Returns each [`Language`] for the active syntax layers at the given location.
1488 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1489 let offset = position.to_offset(self);
1490 let mut languages: Vec<Arc<Language>> = self
1491 .syntax_map
1492 .lock()
1493 .layers_for_range(offset..offset, &self.text, false)
1494 .map(|info| info.language.clone())
1495 .collect();
1496
1497 if languages.is_empty()
1498 && let Some(buffer_language) = self.language()
1499 {
1500 languages.push(buffer_language.clone());
1501 }
1502
1503 languages
1504 }
1505
1506 /// An integer version number that accounts for all updates besides
1507 /// the buffer's text itself (which is versioned via a version vector).
1508 pub fn non_text_state_update_count(&self) -> usize {
1509 self.non_text_state_update_count
1510 }
1511
1512 /// Whether the buffer is being parsed in the background.
1513 #[cfg(any(test, feature = "test-support"))]
1514 pub fn is_parsing(&self) -> bool {
1515 self.reparse.is_some()
1516 }
1517
1518 /// Indicates whether the buffer contains any regions that may be
1519 /// written in a language that hasn't been loaded yet.
1520 pub fn contains_unknown_injections(&self) -> bool {
1521 self.syntax_map.lock().contains_unknown_injections()
1522 }
1523
1524 #[cfg(any(test, feature = "test-support"))]
1525 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1526 self.sync_parse_timeout = timeout;
1527 }
1528
1529 /// Called after an edit to synchronize the buffer's main parse tree with
1530 /// the buffer's new underlying state.
1531 ///
1532 /// Locks the syntax map and interpolates the edits since the last reparse
1533 /// into the foreground syntax tree.
1534 ///
1535 /// Then takes a stable snapshot of the syntax map before unlocking it.
1536 /// The snapshot with the interpolated edits is sent to a background thread,
1537 /// where we ask Tree-sitter to perform an incremental parse.
1538 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the new syntax tree.
    ///
    /// If the timeout elapses first, we return with the interpolated tree still
    /// in the foreground and spawn a second task that waits for the parse to
    /// finish. When the background parse completes, it calls back into the main
    /// thread and assigns the resulting parse state.
1547 ///
1548 /// If the buffer or grammar changed since the start of the background parse,
1549 /// initiate an additional reparse recursively. To avoid concurrent parses
1550 /// for the same buffer, we only initiate a new parse if we are not already
1551 /// parsing in the background.
1552 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1553 if self.reparse.is_some() {
1554 return;
1555 }
1556 let language = if let Some(language) = self.language.clone() {
1557 language
1558 } else {
1559 return;
1560 };
1561
1562 let text = self.text_snapshot();
1563 let parsed_version = self.version();
1564
1565 let mut syntax_map = self.syntax_map.lock();
1566 syntax_map.interpolate(&text);
1567 let language_registry = syntax_map.language_registry();
1568 let mut syntax_snapshot = syntax_map.snapshot();
1569 drop(syntax_map);
1570
1571 let parse_task = cx.background_spawn({
1572 let language = language.clone();
1573 let language_registry = language_registry.clone();
1574 async move {
1575 syntax_snapshot.reparse(&text, language_registry, language);
1576 syntax_snapshot
1577 }
1578 });
1579
1580 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1581 match cx
1582 .background_executor()
1583 .block_with_timeout(self.sync_parse_timeout, parse_task)
1584 {
1585 Ok(new_syntax_snapshot) => {
1586 self.did_finish_parsing(new_syntax_snapshot, cx);
1587 self.reparse = None;
1588 }
1589 Err(parse_task) => {
1590 // todo(lw): hot foreground spawn
1591 self.reparse = Some(cx.spawn(async move |this, cx| {
1592 let new_syntax_map = cx.background_spawn(parse_task).await;
1593 this.update(cx, move |this, cx| {
1594 let grammar_changed = || {
1595 this.language.as_ref().is_none_or(|current_language| {
1596 !Arc::ptr_eq(&language, current_language)
1597 })
1598 };
1599 let language_registry_changed = || {
1600 new_syntax_map.contains_unknown_injections()
1601 && language_registry.is_some_and(|registry| {
1602 registry.version() != new_syntax_map.language_registry_version()
1603 })
1604 };
1605 let parse_again = this.version.changed_since(&parsed_version)
1606 || language_registry_changed()
1607 || grammar_changed();
1608 this.did_finish_parsing(new_syntax_map, cx);
1609 this.reparse = None;
1610 if parse_again {
1611 this.reparse(cx);
1612 }
1613 })
1614 .ok();
1615 }));
1616 }
1617 }
1618 }
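
    // Illustrative sketch (test-only): the "block briefly, then finish asynchronously"
    // pattern that `reparse` uses above, reduced to its skeleton. The background work is
    // a placeholder; only the control flow is the point.
    #[cfg(test)]
    fn _example_block_with_timeout(cx: &mut Context<Self>) {
        let work = cx.background_spawn(async move { 2 + 2 });
        match cx
            .background_executor()
            .block_with_timeout(Duration::from_millis(1), work)
        {
            // The work finished within the timeout, so proceed synchronously.
            Ok(result) => debug_assert_eq!(result, 4),
            // Otherwise, finish the work in a spawned task and handle it there.
            Err(work) => cx
                .spawn(async move |_, _| {
                    debug_assert_eq!(work.await, 4);
                })
                .detach(),
        }
    }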
1619
1620 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1621 self.was_changed();
1622 self.non_text_state_update_count += 1;
1623 self.syntax_map.lock().did_parse(syntax_snapshot);
1624 self.request_autoindent(cx);
1625 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1626 cx.emit(BufferEvent::Reparsed);
1627 cx.notify();
1628 }
1629
1630 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1631 self.parse_status.1.clone()
1632 }
1633
1634 /// Assign to the buffer a set of diagnostics created by a given language server.
1635 pub fn update_diagnostics(
1636 &mut self,
1637 server_id: LanguageServerId,
1638 diagnostics: DiagnosticSet,
1639 cx: &mut Context<Self>,
1640 ) {
1641 let lamport_timestamp = self.text.lamport_clock.tick();
1642 let op = Operation::UpdateDiagnostics {
1643 server_id,
1644 diagnostics: diagnostics.iter().cloned().collect(),
1645 lamport_timestamp,
1646 };
1647
1648 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1649 self.send_operation(op, true, cx);
1650 }
1651
1652 pub fn buffer_diagnostics(
1653 &self,
1654 for_server: Option<LanguageServerId>,
1655 ) -> Vec<&DiagnosticEntry<Anchor>> {
1656 match for_server {
1657 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1658 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1659 Err(_) => Vec::new(),
1660 },
1661 None => self
1662 .diagnostics
1663 .iter()
1664 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1665 .collect(),
1666 }
1667 }
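
    // Illustrative sketch (test-only): narrowing the result of `buffer_diagnostics` to
    // the error-severity entries reported by one language server. The helper name is
    // illustrative, not part of the buffer API.
    #[cfg(test)]
    fn _example_count_errors(&self, server_id: LanguageServerId) -> usize {
        self.buffer_diagnostics(Some(server_id))
            .into_iter()
            .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
            .count()
    }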
1668
1669 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1670 if let Some(indent_sizes) = self.compute_autoindents() {
1671 let indent_sizes = cx.background_spawn(indent_sizes);
1672 match cx
1673 .background_executor()
1674 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1675 {
1676 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1677 Err(indent_sizes) => {
1678 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1679 let indent_sizes = indent_sizes.await;
1680 this.update(cx, |this, cx| {
1681 this.apply_autoindents(indent_sizes, cx);
1682 })
1683 .ok();
1684 }));
1685 }
1686 }
1687 } else {
1688 self.autoindent_requests.clear();
1689 for tx in self.wait_for_autoindent_txs.drain(..) {
1690 tx.send(()).ok();
1691 }
1692 }
1693 }
1694
1695 fn compute_autoindents(
1696 &self,
1697 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1698 let max_rows_between_yields = 100;
1699 let snapshot = self.snapshot();
1700 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1701 return None;
1702 }
1703
1704 let autoindent_requests = self.autoindent_requests.clone();
1705 Some(async move {
1706 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1707 for request in autoindent_requests {
1708 // Resolve each edited range to its row in the current buffer and in the
1709 // buffer before this batch of edits.
1710 let mut row_ranges = Vec::new();
1711 let mut old_to_new_rows = BTreeMap::new();
1712 let mut language_indent_sizes_by_new_row = Vec::new();
1713 for entry in &request.entries {
1714 let position = entry.range.start;
1715 let new_row = position.to_point(&snapshot).row;
1716 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1717 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1718
1719 if !entry.first_line_is_new {
1720 let old_row = position.to_point(&request.before_edit).row;
1721 old_to_new_rows.insert(old_row, new_row);
1722 }
1723 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1724 }
1725
1726 // Build a map containing the suggested indentation for each of the edited lines
1727 // with respect to the state of the buffer before these edits. This map is keyed
1728 // by the rows for these lines in the current state of the buffer.
1729 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1730 let old_edited_ranges =
1731 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1732 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1733 let mut language_indent_size = IndentSize::default();
1734 for old_edited_range in old_edited_ranges {
1735 let suggestions = request
1736 .before_edit
1737 .suggest_autoindents(old_edited_range.clone())
1738 .into_iter()
1739 .flatten();
1740 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1741 if let Some(suggestion) = suggestion {
1742 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1743
1744 // Find the indent size based on the language for this row.
1745 while let Some((row, size)) = language_indent_sizes.peek() {
1746 if *row > new_row {
1747 break;
1748 }
1749 language_indent_size = *size;
1750 language_indent_sizes.next();
1751 }
1752
1753 let suggested_indent = old_to_new_rows
1754 .get(&suggestion.basis_row)
1755 .and_then(|from_row| {
1756 Some(old_suggestions.get(from_row).copied()?.0)
1757 })
1758 .unwrap_or_else(|| {
1759 request
1760 .before_edit
1761 .indent_size_for_line(suggestion.basis_row)
1762 })
1763 .with_delta(suggestion.delta, language_indent_size);
1764 old_suggestions
1765 .insert(new_row, (suggested_indent, suggestion.within_error));
1766 }
1767 }
1768 yield_now().await;
1769 }
1770
1771 // Compute new suggestions for each line, but only include them in the result
1772 // if they differ from the old suggestion for that line.
1773 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1774 let mut language_indent_size = IndentSize::default();
1775 for (row_range, original_indent_column) in row_ranges {
1776 let new_edited_row_range = if request.is_block_mode {
1777 row_range.start..row_range.start + 1
1778 } else {
1779 row_range.clone()
1780 };
1781
1782 let suggestions = snapshot
1783 .suggest_autoindents(new_edited_row_range.clone())
1784 .into_iter()
1785 .flatten();
1786 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1787 if let Some(suggestion) = suggestion {
1788 // Find the indent size based on the language for this row.
1789 while let Some((row, size)) = language_indent_sizes.peek() {
1790 if *row > new_row {
1791 break;
1792 }
1793 language_indent_size = *size;
1794 language_indent_sizes.next();
1795 }
1796
1797 let suggested_indent = indent_sizes
1798 .get(&suggestion.basis_row)
1799 .copied()
1800 .map(|e| e.0)
1801 .unwrap_or_else(|| {
1802 snapshot.indent_size_for_line(suggestion.basis_row)
1803 })
1804 .with_delta(suggestion.delta, language_indent_size);
1805
1806 if old_suggestions.get(&new_row).is_none_or(
1807 |(old_indentation, was_within_error)| {
1808 suggested_indent != *old_indentation
1809 && (!suggestion.within_error || *was_within_error)
1810 },
1811 ) {
1812 indent_sizes.insert(
1813 new_row,
1814 (suggested_indent, request.ignore_empty_lines),
1815 );
1816 }
1817 }
1818 }
1819
1820 if let (true, Some(original_indent_column)) =
1821 (request.is_block_mode, original_indent_column)
1822 {
1823 let new_indent =
1824 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1825 *indent
1826 } else {
1827 snapshot.indent_size_for_line(row_range.start)
1828 };
1829 let delta = new_indent.len as i64 - original_indent_column as i64;
1830 if delta != 0 {
1831 for row in row_range.skip(1) {
1832 indent_sizes.entry(row).or_insert_with(|| {
1833 let mut size = snapshot.indent_size_for_line(row);
1834 if size.kind == new_indent.kind {
1835 match delta.cmp(&0) {
1836 Ordering::Greater => size.len += delta as u32,
1837 Ordering::Less => {
1838 size.len = size.len.saturating_sub(-delta as u32)
1839 }
1840 Ordering::Equal => {}
1841 }
1842 }
1843 (size, request.ignore_empty_lines)
1844 });
1845 }
1846 }
1847 }
1848
1849 yield_now().await;
1850 }
1851 }
1852
1853 indent_sizes
1854 .into_iter()
1855 .filter_map(|(row, (indent, ignore_empty_lines))| {
1856 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1857 None
1858 } else {
1859 Some((row, indent))
1860 }
1861 })
1862 .collect()
1863 })
1864 }
1865
1866 fn apply_autoindents(
1867 &mut self,
1868 indent_sizes: BTreeMap<u32, IndentSize>,
1869 cx: &mut Context<Self>,
1870 ) {
1871 self.autoindent_requests.clear();
1872 for tx in self.wait_for_autoindent_txs.drain(..) {
1873 tx.send(()).ok();
1874 }
1875
1876 let edits: Vec<_> = indent_sizes
1877 .into_iter()
1878 .filter_map(|(row, indent_size)| {
1879 let current_size = indent_size_for_line(self, row);
1880 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1881 })
1882 .collect();
1883
1884 let preserve_preview = self.preserve_preview();
1885 self.edit(edits, None, cx);
1886 if preserve_preview {
1887 self.refresh_preview();
1888 }
1889 }
1890
1891 /// Create a minimal edit that will cause the given row to be indented
1892 /// with the given size. After applying this edit, the length of the line
1893 /// will always be at least `new_size.len`.
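///
/// A minimal sketch (not compiled as a doctest): growing a two-space indent
/// on row 3 to four spaces yields an insertion of two spaces at the start of
/// that row.
///
/// ```ignore
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```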
1894 pub fn edit_for_indent_size_adjustment(
1895 row: u32,
1896 current_size: IndentSize,
1897 new_size: IndentSize,
1898 ) -> Option<(Range<Point>, String)> {
1899 if new_size.kind == current_size.kind {
1900 match new_size.len.cmp(&current_size.len) {
1901 Ordering::Greater => {
1902 let point = Point::new(row, 0);
1903 Some((
1904 point..point,
1905 iter::repeat(new_size.char())
1906 .take((new_size.len - current_size.len) as usize)
1907 .collect::<String>(),
1908 ))
1909 }
1910
1911 Ordering::Less => Some((
1912 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1913 String::new(),
1914 )),
1915
1916 Ordering::Equal => None,
1917 }
1918 } else {
1919 Some((
1920 Point::new(row, 0)..Point::new(row, current_size.len),
1921 iter::repeat(new_size.char())
1922 .take(new_size.len as usize)
1923 .collect::<String>(),
1924 ))
1925 }
1926 }
1927
1928 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1929 /// and the given new text.
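///
/// A sketch of typical usage, not a compiled doctest; `buffer` stands in for a
/// `&mut Buffer`, `cx` for its `&mut Context<Buffer>`, and the async-closure
/// style mirrors the other spawns in this file:
///
/// ```ignore
/// let task = buffer.diff("fn main() {}\n".to_string(), cx);
/// cx.spawn(async move |this, cx| {
///     let diff = task.await;
///     this.update(cx, |buffer, cx| {
///         // Hunks that conflict with edits made in the meantime are dropped.
///         buffer.apply_diff(diff, cx);
///     })
///     .ok();
/// })
/// .detach();
/// ```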
1930 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1931 let old_text = self.as_rope().clone();
1932 let base_version = self.version();
1933 cx.background_executor()
1934 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1935 let old_text = old_text.to_string();
1936 let line_ending = LineEnding::detect(&new_text);
1937 LineEnding::normalize(&mut new_text);
1938 let edits = text_diff(&old_text, &new_text);
1939 Diff {
1940 base_version,
1941 line_ending,
1942 edits,
1943 }
1944 })
1945 }
1946
1947 /// Spawns a background task that searches the buffer for any whitespace
1948 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1949 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1950 let old_text = self.as_rope().clone();
1951 let line_ending = self.line_ending();
1952 let base_version = self.version();
1953 cx.background_spawn(async move {
1954 let ranges = trailing_whitespace_ranges(&old_text);
1955 let empty = Arc::<str>::from("");
1956 Diff {
1957 base_version,
1958 line_ending,
1959 edits: ranges
1960 .into_iter()
1961 .map(|range| (range, empty.clone()))
1962 .collect(),
1963 }
1964 })
1965 }
1966
1967 /// Ensures that the buffer ends with a single newline character, and
1968 /// no other trailing whitespace. Does nothing if the buffer is empty.
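///
/// Illustrative only; `buffer` and `cx` are assumed to be a `&mut Buffer` and
/// its `&mut Context<Buffer>`:
///
/// ```ignore
/// // Given "fn main() {}\t  ", the trailing whitespace is replaced with a
/// // single newline; a buffer already ending in exactly one newline is left
/// // untouched.
/// buffer.ensure_final_newline(cx);
/// ```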
1969 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1970 let len = self.len();
1971 if len == 0 {
1972 return;
1973 }
1974 let mut offset = len;
1975 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1976 let non_whitespace_len = chunk
1977 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1978 .len();
1979 offset -= chunk.len();
1980 offset += non_whitespace_len;
1981 if non_whitespace_len != 0 {
1982 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1983 return;
1984 }
1985 break;
1986 }
1987 }
1988 self.edit([(offset..len, "\n")], None, cx);
1989 }
1990
1991 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1992 /// calculated, then adjust the diff to account for those changes, and discard any
1993 /// parts of the diff that conflict with those changes.
1994 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1995 let snapshot = self.snapshot();
1996 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1997 let mut delta = 0;
1998 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1999 while let Some(edit_since) = edits_since.peek() {
2000 // If the edit occurs after a diff hunk, then it does not
2001 // affect that hunk.
2002 if edit_since.old.start > range.end {
2003 break;
2004 }
2005 // If the edit precedes the diff hunk, then adjust the hunk
2006 // to reflect the edit.
2007 else if edit_since.old.end < range.start {
2008 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2009 edits_since.next();
2010 }
2011 // If the edit intersects a diff hunk, then discard that hunk.
2012 else {
2013 return None;
2014 }
2015 }
2016
2017 let start = (range.start as i64 + delta) as usize;
2018 let end = (range.end as i64 + delta) as usize;
2019 Some((start..end, new_text))
2020 });
2021
2022 self.start_transaction();
2023 self.text.set_line_ending(diff.line_ending);
2024 self.edit(adjusted_edits, None, cx);
2025 self.end_transaction(cx)
2026 }
2027
2028 pub fn has_unsaved_edits(&self) -> bool {
2029 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2030
2031 if last_version == self.version {
2032 self.has_unsaved_edits
2033 .set((last_version, has_unsaved_edits));
2034 return has_unsaved_edits;
2035 }
2036
2037 let has_edits = self.has_edits_since(&self.saved_version);
2038 self.has_unsaved_edits
2039 .set((self.version.clone(), has_edits));
2040 has_edits
2041 }
2042
2043 /// Checks if the buffer has unsaved changes.
2044 pub fn is_dirty(&self) -> bool {
2045 if self.capability == Capability::ReadOnly {
2046 return false;
2047 }
2048 if self.has_conflict {
2049 return true;
2050 }
2051 match self.file.as_ref().map(|f| f.disk_state()) {
2052 Some(DiskState::New) | Some(DiskState::Deleted) => {
2053 !self.is_empty() && self.has_unsaved_edits()
2054 }
2055 _ => self.has_unsaved_edits(),
2056 }
2057 }
2058
2059 /// Checks if the buffer and its file have both changed since the buffer
2060 /// was last saved or reloaded.
2061 pub fn has_conflict(&self) -> bool {
2062 if self.has_conflict {
2063 return true;
2064 }
2065 let Some(file) = self.file.as_ref() else {
2066 return false;
2067 };
2068 match file.disk_state() {
2069 DiskState::New => false,
2070 DiskState::Present { mtime } => match self.saved_mtime {
2071 Some(saved_mtime) => {
2072 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2073 }
2074 None => true,
2075 },
2076 DiskState::Deleted => false,
2077 }
2078 }
2079
2080 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2081 pub fn subscribe(&mut self) -> Subscription {
2082 self.text.subscribe()
2083 }
2084
2085 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2086 ///
2087 /// This allows downstream code to check if the buffer's text has changed without
2088 /// waiting for an effect cycle, which would be required when using events.
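///
/// A sketch of the intended usage, with `buffer` being a `&mut Buffer` and
/// `cx` its `&mut Context<Buffer>`:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
///
/// buffer.edit([(0..0, "x")], None, cx);
/// // The bit is flipped synchronously, without waiting for an event.
/// assert!(changed.get());
/// ```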
2089 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2090 if let Err(ix) = self
2091 .change_bits
2092 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2093 {
2094 self.change_bits.insert(ix, bit);
2095 }
2096 }
2097
2098 /// Set the change bit for all "listeners".
2099 fn was_changed(&mut self) {
2100 self.change_bits.retain(|change_bit| {
2101 change_bit
2102 .upgrade()
2103 .inspect(|bit| {
2104 _ = bit.replace(true);
2105 })
2106 .is_some()
2107 });
2108 }
2109
2110 /// Starts a transaction, if one is not already in-progress. When undoing or
2111 /// redoing edits, all of the edits performed within a transaction are undone
2112 /// or redone together.
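///
/// A minimal sketch, assuming a `&mut Buffer` and its `&mut Context<Buffer>`:
///
/// ```ignore
/// // Group two edits so that a single undo reverts them both.
/// buffer.start_transaction();
/// buffer.edit([(0..0, "a")], None, cx);
/// buffer.edit([(1..1, "b")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx);
/// ```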
2113 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2114 self.start_transaction_at(Instant::now())
2115 }
2116
2117 /// Starts a transaction, providing the current time. Subsequent transactions
2118 /// that occur within a short period of time will be grouped together. This
2119 /// is controlled by the buffer's undo grouping duration.
2120 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2121 self.transaction_depth += 1;
2122 if self.was_dirty_before_starting_transaction.is_none() {
2123 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2124 }
2125 self.text.start_transaction_at(now)
2126 }
2127
2128 /// Terminates the current transaction, if this is the outermost transaction.
2129 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2130 self.end_transaction_at(Instant::now(), cx)
2131 }
2132
2133 /// Terminates the current transaction, providing the current time. Subsequent transactions
2134 /// that occur within a short period of time will be grouped together. This
2135 /// is controlled by the buffer's undo grouping duration.
2136 pub fn end_transaction_at(
2137 &mut self,
2138 now: Instant,
2139 cx: &mut Context<Self>,
2140 ) -> Option<TransactionId> {
2141 assert!(self.transaction_depth > 0);
2142 self.transaction_depth -= 1;
2143 let was_dirty = if self.transaction_depth == 0 {
2144 self.was_dirty_before_starting_transaction.take().unwrap()
2145 } else {
2146 false
2147 };
2148 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2149 self.did_edit(&start_version, was_dirty, cx);
2150 Some(transaction_id)
2151 } else {
2152 None
2153 }
2154 }
2155
2156 /// Manually add a transaction to the buffer's undo history.
2157 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2158 self.text.push_transaction(transaction, now);
2159 }
2160
2161 /// Differs from `push_transaction` in that it does not clear the redo
2162 /// stack. Intended to be used to create a parent transaction to merge
2163 /// potential child transactions into.
2164 ///
2165 /// The caller is responsible for removing it from the undo history using
2166 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2167 /// are merged into this transaction, the caller is responsible for ensuring
2168 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2169 /// cleared is to create transactions with the usual `start_transaction` and
2170 /// `end_transaction` methods and to merge the resulting transactions into
2171 /// the transaction created by this method.
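///
/// A sketch of the pattern described above (illustrative, not compiled):
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(std::time::Instant::now());
///
/// buffer.start_transaction();
/// buffer.edit([(0..0, "x")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     // Nothing was merged in, so drop the placeholder transaction again.
///     buffer.forget_transaction(parent);
/// }
/// ```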
2172 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2173 self.text.push_empty_transaction(now)
2174 }
2175
2176 /// Prevent the last transaction from being grouped with any subsequent transactions,
2177 /// even if they occur within the buffer's undo grouping duration.
2178 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2179 self.text.finalize_last_transaction()
2180 }
2181
2182 /// Manually group all changes since a given transaction.
2183 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2184 self.text.group_until_transaction(transaction_id);
2185 }
2186
2187 /// Manually remove a transaction from the buffer's undo history.
2188 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2189 self.text.forget_transaction(transaction_id)
2190 }
2191
2192 /// Retrieve a transaction from the buffer's undo history.
2193 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2194 self.text.get_transaction(transaction_id)
2195 }
2196
2197 /// Manually merge two transactions in the buffer's undo history.
2198 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2199 self.text.merge_transactions(transaction, destination);
2200 }
2201
2202 /// Waits for the buffer to receive operations with the given timestamps.
2203 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2204 &mut self,
2205 edit_ids: It,
2206 ) -> impl Future<Output = Result<()>> + use<It> {
2207 self.text.wait_for_edits(edit_ids)
2208 }
2209
2210 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2211 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2212 &mut self,
2213 anchors: It,
2214 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2215 self.text.wait_for_anchors(anchors)
2216 }
2217
2218 /// Waits for the buffer to receive operations up to the given version.
2219 pub fn wait_for_version(
2220 &mut self,
2221 version: clock::Global,
2222 ) -> impl Future<Output = Result<()>> + use<> {
2223 self.text.wait_for_version(version)
2224 }
2225
2226 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2227 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2228 pub fn give_up_waiting(&mut self) {
2229 self.text.give_up_waiting();
2230 }
2231
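/// Returns a receiver that resolves once all pending auto-indent requests have
/// been applied or dropped, or `None` if no auto-indent is currently pending.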
2232 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2233 let mut rx = None;
2234 if !self.autoindent_requests.is_empty() {
2235 let channel = oneshot::channel();
2236 self.wait_for_autoindent_txs.push(channel.0);
2237 rx = Some(channel.1);
2238 }
2239 rx
2240 }
2241
2242 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2243 pub fn set_active_selections(
2244 &mut self,
2245 selections: Arc<[Selection<Anchor>]>,
2246 line_mode: bool,
2247 cursor_shape: CursorShape,
2248 cx: &mut Context<Self>,
2249 ) {
2250 let lamport_timestamp = self.text.lamport_clock.tick();
2251 self.remote_selections.insert(
2252 self.text.replica_id(),
2253 SelectionSet {
2254 selections: selections.clone(),
2255 lamport_timestamp,
2256 line_mode,
2257 cursor_shape,
2258 },
2259 );
2260 self.send_operation(
2261 Operation::UpdateSelections {
2262 selections,
2263 line_mode,
2264 lamport_timestamp,
2265 cursor_shape,
2266 },
2267 true,
2268 cx,
2269 );
2270 self.non_text_state_update_count += 1;
2271 cx.notify();
2272 }
2273
2274 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2275 /// this replica.
2276 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2277 if self
2278 .remote_selections
2279 .get(&self.text.replica_id())
2280 .is_none_or(|set| !set.selections.is_empty())
2281 {
2282 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2283 }
2284 }
2285
2286 pub fn set_agent_selections(
2287 &mut self,
2288 selections: Arc<[Selection<Anchor>]>,
2289 line_mode: bool,
2290 cursor_shape: CursorShape,
2291 cx: &mut Context<Self>,
2292 ) {
2293 let lamport_timestamp = self.text.lamport_clock.tick();
2294 self.remote_selections.insert(
2295 ReplicaId::AGENT,
2296 SelectionSet {
2297 selections,
2298 lamport_timestamp,
2299 line_mode,
2300 cursor_shape,
2301 },
2302 );
2303 self.non_text_state_update_count += 1;
2304 cx.notify();
2305 }
2306
2307 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2308 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2309 }
2310
2311 /// Replaces the buffer's entire text.
2312 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2313 where
2314 T: Into<Arc<str>>,
2315 {
2316 self.autoindent_requests.clear();
2317 self.edit([(0..self.len(), text)], None, cx)
2318 }
2319
2320 /// Appends the given text to the end of the buffer.
2321 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2322 where
2323 T: Into<Arc<str>>,
2324 {
2325 self.edit([(self.len()..self.len(), text)], None, cx)
2326 }
2327
2328 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2329 /// delete, and a string of text to insert at that location.
2330 ///
2331 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2332 /// request for the edited ranges, which will be processed when the buffer finishes
2333 /// parsing.
2334 ///
2335 /// Parsing takes place at the end of a transaction, and may run synchronously
2336 /// or asynchronously, depending on the changes.
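///
/// A sketch of a typical call (not compiled as a doctest); both ranges are
/// resolved against the buffer's contents from before this call:
///
/// ```ignore
/// buffer.edit(
///     [(0..3, "foo"), (10..10, "\nbar")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```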
2337 pub fn edit<I, S, T>(
2338 &mut self,
2339 edits_iter: I,
2340 autoindent_mode: Option<AutoindentMode>,
2341 cx: &mut Context<Self>,
2342 ) -> Option<clock::Lamport>
2343 where
2344 I: IntoIterator<Item = (Range<S>, T)>,
2345 S: ToOffset,
2346 T: Into<Arc<str>>,
2347 {
2348 // Skip invalid edits and coalesce contiguous ones.
2349 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2350
2351 for (range, new_text) in edits_iter {
2352 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2353
2354 if range.start > range.end {
2355 mem::swap(&mut range.start, &mut range.end);
2356 }
2357 let new_text = new_text.into();
2358 if !new_text.is_empty() || !range.is_empty() {
2359 if let Some((prev_range, prev_text)) = edits.last_mut()
2360 && prev_range.end >= range.start
2361 {
2362 prev_range.end = cmp::max(prev_range.end, range.end);
2363 *prev_text = format!("{prev_text}{new_text}").into();
2364 } else {
2365 edits.push((range, new_text));
2366 }
2367 }
2368 }
2369 if edits.is_empty() {
2370 return None;
2371 }
2372
2373 self.start_transaction();
2374 self.pending_autoindent.take();
2375 let autoindent_request = autoindent_mode
2376 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2377
2378 let edit_operation = self
2379 .text
2380 .edit(edits.iter().cloned(), cx.background_executor());
2381 let edit_id = edit_operation.timestamp();
2382
2383 if let Some((before_edit, mode)) = autoindent_request {
2384 let mut delta = 0isize;
2385 let mut previous_setting = None;
2386 let entries: Vec<_> = edits
2387 .into_iter()
2388 .enumerate()
2389 .zip(&edit_operation.as_edit().unwrap().new_text)
2390 .filter(|((_, (range, _)), _)| {
2391 let language = before_edit.language_at(range.start);
2392 let language_id = language.map(|l| l.id());
2393 if let Some((cached_language_id, auto_indent)) = previous_setting
2394 && cached_language_id == language_id
2395 {
2396 auto_indent
2397 } else {
2398 // The auto-indent setting is not present in editorconfigs, hence
2399 // we can avoid passing the file here.
2400 let auto_indent =
2401 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2402 previous_setting = Some((language_id, auto_indent));
2403 auto_indent
2404 }
2405 })
2406 .map(|((ix, (range, _)), new_text)| {
2407 let new_text_length = new_text.len();
2408 let old_start = range.start.to_point(&before_edit);
2409 let new_start = (delta + range.start as isize) as usize;
2410 let range_len = range.end - range.start;
2411 delta += new_text_length as isize - range_len as isize;
2412
2413 // Decide what range of the insertion to auto-indent, and whether
2414 // the first line of the insertion should be considered a newly-inserted line
2415 // or an edit to an existing line.
2416 let mut range_of_insertion_to_indent = 0..new_text_length;
2417 let mut first_line_is_new = true;
2418
2419 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2420 let old_line_end = before_edit.line_len(old_start.row);
2421
2422 if old_start.column > old_line_start {
2423 first_line_is_new = false;
2424 }
2425
2426 if !new_text.contains('\n')
2427 && (old_start.column + (range_len as u32) < old_line_end
2428 || old_line_end == old_line_start)
2429 {
2430 first_line_is_new = false;
2431 }
2432
2433 // When inserting text starting with a newline, avoid auto-indenting the
2434 // previous line.
2435 if new_text.starts_with('\n') {
2436 range_of_insertion_to_indent.start += 1;
2437 first_line_is_new = true;
2438 }
2439
2440 let mut original_indent_column = None;
2441 if let AutoindentMode::Block {
2442 original_indent_columns,
2443 } = &mode
2444 {
2445 original_indent_column = Some(if new_text.starts_with('\n') {
2446 indent_size_for_text(
2447 new_text[range_of_insertion_to_indent.clone()].chars(),
2448 )
2449 .len
2450 } else {
2451 original_indent_columns
2452 .get(ix)
2453 .copied()
2454 .flatten()
2455 .unwrap_or_else(|| {
2456 indent_size_for_text(
2457 new_text[range_of_insertion_to_indent.clone()].chars(),
2458 )
2459 .len
2460 })
2461 });
2462
2463 // Avoid auto-indenting the line after the edit.
2464 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2465 range_of_insertion_to_indent.end -= 1;
2466 }
2467 }
2468
2469 AutoindentRequestEntry {
2470 first_line_is_new,
2471 original_indent_column,
2472 indent_size: before_edit.language_indent_size_at(range.start, cx),
2473 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2474 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2475 }
2476 })
2477 .collect();
2478
2479 if !entries.is_empty() {
2480 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2481 before_edit,
2482 entries,
2483 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2484 ignore_empty_lines: false,
2485 }));
2486 }
2487 }
2488
2489 self.end_transaction(cx);
2490 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2491 Some(edit_id)
2492 }
2493
2494 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2495 self.was_changed();
2496
2497 if self.edits_since::<usize>(old_version).next().is_none() {
2498 return;
2499 }
2500
2501 self.reparse(cx);
2502 cx.emit(BufferEvent::Edited);
2503 if was_dirty != self.is_dirty() {
2504 cx.emit(BufferEvent::DirtyChanged);
2505 }
2506 cx.notify();
2507 }
2508
2509 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2510 where
2511 I: IntoIterator<Item = Range<T>>,
2512 T: ToOffset + Copy,
2513 {
2514 let before_edit = self.snapshot();
2515 let entries = ranges
2516 .into_iter()
2517 .map(|range| AutoindentRequestEntry {
2518 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2519 first_line_is_new: true,
2520 indent_size: before_edit.language_indent_size_at(range.start, cx),
2521 original_indent_column: None,
2522 })
2523 .collect();
2524 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2525 before_edit,
2526 entries,
2527 is_block_mode: false,
2528 ignore_empty_lines: true,
2529 }));
2530 self.request_autoindent(cx);
2531 }
2532
2533 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2534 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
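///
/// Illustrative usage, assuming `buffer` and `cx` as elsewhere in this file:
///
/// ```ignore
/// // `start` is the beginning of the newly created empty line.
/// let start = buffer.insert_empty_line(Point::new(3, 5), true, true, cx);
/// ```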
2535 pub fn insert_empty_line(
2536 &mut self,
2537 position: impl ToPoint,
2538 space_above: bool,
2539 space_below: bool,
2540 cx: &mut Context<Self>,
2541 ) -> Point {
2542 let mut position = position.to_point(self);
2543
2544 self.start_transaction();
2545
2546 self.edit(
2547 [(position..position, "\n")],
2548 Some(AutoindentMode::EachLine),
2549 cx,
2550 );
2551
2552 if position.column > 0 {
2553 position += Point::new(1, 0);
2554 }
2555
2556 if !self.is_line_blank(position.row) {
2557 self.edit(
2558 [(position..position, "\n")],
2559 Some(AutoindentMode::EachLine),
2560 cx,
2561 );
2562 }
2563
2564 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2565 self.edit(
2566 [(position..position, "\n")],
2567 Some(AutoindentMode::EachLine),
2568 cx,
2569 );
2570 position.row += 1;
2571 }
2572
2573 if space_below
2574 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2575 {
2576 self.edit(
2577 [(position..position, "\n")],
2578 Some(AutoindentMode::EachLine),
2579 cx,
2580 );
2581 }
2582
2583 self.end_transaction(cx);
2584
2585 position
2586 }
2587
2588 /// Applies the given remote operations to the buffer.
2589 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2590 self.pending_autoindent.take();
2591 let was_dirty = self.is_dirty();
2592 let old_version = self.version.clone();
2593 let mut deferred_ops = Vec::new();
2594 let buffer_ops = ops
2595 .into_iter()
2596 .filter_map(|op| match op {
2597 Operation::Buffer(op) => Some(op),
2598 _ => {
2599 if self.can_apply_op(&op) {
2600 self.apply_op(op, cx);
2601 } else {
2602 deferred_ops.push(op);
2603 }
2604 None
2605 }
2606 })
2607 .collect::<Vec<_>>();
2608 for operation in buffer_ops.iter() {
2609 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2610 }
2611 self.text
2612 .apply_ops(buffer_ops, Some(cx.background_executor()));
2613 self.deferred_ops.insert(deferred_ops);
2614 self.flush_deferred_ops(cx);
2615 self.did_edit(&old_version, was_dirty, cx);
2616 // Notify independently of whether the buffer was edited as the operations could include a
2617 // selection update.
2618 cx.notify();
2619 }
2620
2621 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2622 let mut deferred_ops = Vec::new();
2623 for op in self.deferred_ops.drain().iter().cloned() {
2624 if self.can_apply_op(&op) {
2625 self.apply_op(op, cx);
2626 } else {
2627 deferred_ops.push(op);
2628 }
2629 }
2630 self.deferred_ops.insert(deferred_ops);
2631 }
2632
2633 pub fn has_deferred_ops(&self) -> bool {
2634 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2635 }
2636
2637 fn can_apply_op(&self, operation: &Operation) -> bool {
2638 match operation {
2639 Operation::Buffer(_) => {
2640 unreachable!("buffer operations should never be applied at this layer")
2641 }
2642 Operation::UpdateDiagnostics {
2643 diagnostics: diagnostic_set,
2644 ..
2645 } => diagnostic_set.iter().all(|diagnostic| {
2646 self.text.can_resolve(&diagnostic.range.start)
2647 && self.text.can_resolve(&diagnostic.range.end)
2648 }),
2649 Operation::UpdateSelections { selections, .. } => selections
2650 .iter()
2651 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2652 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2653 }
2654 }
2655
2656 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2657 match operation {
2658 Operation::Buffer(_) => {
2659 unreachable!("buffer operations should never be applied at this layer")
2660 }
2661 Operation::UpdateDiagnostics {
2662 server_id,
2663 diagnostics: diagnostic_set,
2664 lamport_timestamp,
2665 } => {
2666 let snapshot = self.snapshot();
2667 self.apply_diagnostic_update(
2668 server_id,
2669 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2670 lamport_timestamp,
2671 cx,
2672 );
2673 }
2674 Operation::UpdateSelections {
2675 selections,
2676 lamport_timestamp,
2677 line_mode,
2678 cursor_shape,
2679 } => {
2680 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2681 && set.lamport_timestamp > lamport_timestamp
2682 {
2683 return;
2684 }
2685
2686 self.remote_selections.insert(
2687 lamport_timestamp.replica_id,
2688 SelectionSet {
2689 selections,
2690 lamport_timestamp,
2691 line_mode,
2692 cursor_shape,
2693 },
2694 );
2695 self.text.lamport_clock.observe(lamport_timestamp);
2696 self.non_text_state_update_count += 1;
2697 }
2698 Operation::UpdateCompletionTriggers {
2699 triggers,
2700 lamport_timestamp,
2701 server_id,
2702 } => {
2703 if triggers.is_empty() {
2704 self.completion_triggers_per_language_server
2705 .remove(&server_id);
2706 self.completion_triggers = self
2707 .completion_triggers_per_language_server
2708 .values()
2709 .flat_map(|triggers| triggers.iter().cloned())
2710 .collect();
2711 } else {
2712 self.completion_triggers_per_language_server
2713 .insert(server_id, triggers.iter().cloned().collect());
2714 self.completion_triggers.extend(triggers);
2715 }
2716 self.text.lamport_clock.observe(lamport_timestamp);
2717 }
2718 Operation::UpdateLineEnding {
2719 line_ending,
2720 lamport_timestamp,
2721 } => {
2722 self.text.set_line_ending(line_ending);
2723 self.text.lamport_clock.observe(lamport_timestamp);
2724 }
2725 }
2726 }
2727
2728 fn apply_diagnostic_update(
2729 &mut self,
2730 server_id: LanguageServerId,
2731 diagnostics: DiagnosticSet,
2732 lamport_timestamp: clock::Lamport,
2733 cx: &mut Context<Self>,
2734 ) {
2735 if lamport_timestamp > self.diagnostics_timestamp {
2736 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2737 if diagnostics.is_empty() {
2738 if let Ok(ix) = ix {
2739 self.diagnostics.remove(ix);
2740 }
2741 } else {
2742 match ix {
2743 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2744 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2745 };
2746 }
2747 self.diagnostics_timestamp = lamport_timestamp;
2748 self.non_text_state_update_count += 1;
2749 self.text.lamport_clock.observe(lamport_timestamp);
2750 cx.notify();
2751 cx.emit(BufferEvent::DiagnosticsUpdated);
2752 }
2753 }
2754
2755 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2756 self.was_changed();
2757 cx.emit(BufferEvent::Operation {
2758 operation,
2759 is_local,
2760 });
2761 }
2762
2763 /// Removes the selections for a given peer.
2764 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2765 self.remote_selections.remove(&replica_id);
2766 cx.notify();
2767 }
2768
2769 /// Undoes the most recent transaction.
2770 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2771 let was_dirty = self.is_dirty();
2772 let old_version = self.version.clone();
2773
2774 if let Some((transaction_id, operation)) = self.text.undo() {
2775 self.send_operation(Operation::Buffer(operation), true, cx);
2776 self.did_edit(&old_version, was_dirty, cx);
2777 Some(transaction_id)
2778 } else {
2779 None
2780 }
2781 }
2782
2783 /// Manually undoes a specific transaction in the buffer's undo history.
2784 pub fn undo_transaction(
2785 &mut self,
2786 transaction_id: TransactionId,
2787 cx: &mut Context<Self>,
2788 ) -> bool {
2789 let was_dirty = self.is_dirty();
2790 let old_version = self.version.clone();
2791 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2792 self.send_operation(Operation::Buffer(operation), true, cx);
2793 self.did_edit(&old_version, was_dirty, cx);
2794 true
2795 } else {
2796 false
2797 }
2798 }
2799
2800 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2801 pub fn undo_to_transaction(
2802 &mut self,
2803 transaction_id: TransactionId,
2804 cx: &mut Context<Self>,
2805 ) -> bool {
2806 let was_dirty = self.is_dirty();
2807 let old_version = self.version.clone();
2808
2809 let operations = self.text.undo_to_transaction(transaction_id);
2810 let undone = !operations.is_empty();
2811 for operation in operations {
2812 self.send_operation(Operation::Buffer(operation), true, cx);
2813 }
2814 if undone {
2815 self.did_edit(&old_version, was_dirty, cx)
2816 }
2817 undone
2818 }
2819
2820 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2821 let was_dirty = self.is_dirty();
2822 let operation = self.text.undo_operations(counts);
2823 let old_version = self.version.clone();
2824 self.send_operation(Operation::Buffer(operation), true, cx);
2825 self.did_edit(&old_version, was_dirty, cx);
2826 }
2827
2828 /// Redoes the most recently undone transaction.
2829 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2830 let was_dirty = self.is_dirty();
2831 let old_version = self.version.clone();
2832
2833 if let Some((transaction_id, operation)) = self.text.redo() {
2834 self.send_operation(Operation::Buffer(operation), true, cx);
2835 self.did_edit(&old_version, was_dirty, cx);
2836 Some(transaction_id)
2837 } else {
2838 None
2839 }
2840 }
2841
2842 /// Manually redoes all changes up to and including a given transaction in the buffer's redo history.
2843 pub fn redo_to_transaction(
2844 &mut self,
2845 transaction_id: TransactionId,
2846 cx: &mut Context<Self>,
2847 ) -> bool {
2848 let was_dirty = self.is_dirty();
2849 let old_version = self.version.clone();
2850
2851 let operations = self.text.redo_to_transaction(transaction_id);
2852 let redone = !operations.is_empty();
2853 for operation in operations {
2854 self.send_operation(Operation::Buffer(operation), true, cx);
2855 }
2856 if redone {
2857 self.did_edit(&old_version, was_dirty, cx)
2858 }
2859 redone
2860 }
2861
2862 /// Overrides the current completion triggers with the user-provided completion triggers.
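///
/// A sketch (not compiled); `server_id` is a placeholder `LanguageServerId`:
///
/// ```ignore
/// buffer.set_completion_triggers(
///     server_id,
///     BTreeSet::from_iter([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// ```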
2863 pub fn set_completion_triggers(
2864 &mut self,
2865 server_id: LanguageServerId,
2866 triggers: BTreeSet<String>,
2867 cx: &mut Context<Self>,
2868 ) {
2869 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2870 if triggers.is_empty() {
2871 self.completion_triggers_per_language_server
2872 .remove(&server_id);
2873 self.completion_triggers = self
2874 .completion_triggers_per_language_server
2875 .values()
2876 .flat_map(|triggers| triggers.iter().cloned())
2877 .collect();
2878 } else {
2879 self.completion_triggers_per_language_server
2880 .insert(server_id, triggers.clone());
2881 self.completion_triggers.extend(triggers.iter().cloned());
2882 }
2883 self.send_operation(
2884 Operation::UpdateCompletionTriggers {
2885 triggers: triggers.into_iter().collect(),
2886 lamport_timestamp: self.completion_triggers_timestamp,
2887 server_id,
2888 },
2889 true,
2890 cx,
2891 );
2892 cx.notify();
2893 }
2894
2895 /// Returns a list of strings which trigger a completion menu for this language.
2896 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2897 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2898 &self.completion_triggers
2899 }
2900
2901 /// Call this directly after performing edits to prevent the preview tab
2902 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2903 /// to return false until there are additional edits.
2904 pub fn refresh_preview(&mut self) {
2905 self.preview_version = self.version.clone();
2906 }
2907
2908 /// Whether we should preserve the preview status of a tab containing this buffer.
2909 pub fn preserve_preview(&self) -> bool {
2910 !self.has_edits_since(&self.preview_version)
2911 }
2912}
2913
2914#[doc(hidden)]
2915#[cfg(any(test, feature = "test-support"))]
2916impl Buffer {
2917 pub fn edit_via_marked_text(
2918 &mut self,
2919 marked_string: &str,
2920 autoindent_mode: Option<AutoindentMode>,
2921 cx: &mut Context<Self>,
2922 ) {
2923 let edits = self.edits_for_marked_text(marked_string);
2924 self.edit(edits, autoindent_mode, cx);
2925 }
2926
2927 pub fn set_group_interval(&mut self, group_interval: Duration) {
2928 self.text.set_group_interval(group_interval);
2929 }
2930
2931 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2932 where
2933 T: rand::Rng,
2934 {
2935 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2936 let mut last_end = None;
2937 for _ in 0..old_range_count {
2938 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2939 break;
2940 }
2941
2942 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2943 let mut range = self.random_byte_range(new_start, rng);
2944 if rng.random_bool(0.2) {
2945 mem::swap(&mut range.start, &mut range.end);
2946 }
2947 last_end = Some(range.end);
2948
2949 let new_text_len = rng.random_range(0..10);
2950 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2951 new_text = new_text.to_uppercase();
2952
2953 edits.push((range, new_text));
2954 }
2955 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2956 self.edit(edits, None, cx);
2957 }
2958
2959 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2960 let was_dirty = self.is_dirty();
2961 let old_version = self.version.clone();
2962
2963 let ops = self.text.randomly_undo_redo(rng);
2964 if !ops.is_empty() {
2965 for op in ops {
2966 self.send_operation(Operation::Buffer(op), true, cx);
2967 self.did_edit(&old_version, was_dirty, cx);
2968 }
2969 }
2970 }
2971}
2972
2973impl EventEmitter<BufferEvent> for Buffer {}
2974
2975impl Deref for Buffer {
2976 type Target = TextBuffer;
2977
2978 fn deref(&self) -> &Self::Target {
2979 &self.text
2980 }
2981}
2982
2983impl BufferSnapshot {
2984 /// Returns [`IndentSize`] for a given line that respects user settings and
2985 /// language preferences.
2986 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2987 indent_size_for_line(self, row)
2988 }
2989
2990 /// Returns [`IndentSize`] for a given position that respects user settings
2991 /// and language preferences.
2992 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2993 let settings = language_settings(
2994 self.language_at(position).map(|l| l.name()),
2995 self.file(),
2996 cx,
2997 );
2998 if settings.hard_tabs {
2999 IndentSize::tab()
3000 } else {
3001 IndentSize::spaces(settings.tab_size.get())
3002 }
3003 }
3004
3005 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3006 /// is passed in as `single_indent_size`.
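///
/// A minimal sketch, with `snapshot` obtained via `Buffer::snapshot`:
///
/// ```ignore
/// // Suggest indentation for rows 1..5, using four spaces as one indent unit.
/// let suggestions: BTreeMap<u32, IndentSize> =
///     snapshot.suggested_indents(1..5, IndentSize::spaces(4));
/// ```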
3007 pub fn suggested_indents(
3008 &self,
3009 rows: impl Iterator<Item = u32>,
3010 single_indent_size: IndentSize,
3011 ) -> BTreeMap<u32, IndentSize> {
3012 let mut result = BTreeMap::new();
3013
3014 for row_range in contiguous_ranges(rows, 10) {
3015 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3016 Some(suggestions) => suggestions,
3017 _ => break,
3018 };
3019
3020 for (row, suggestion) in row_range.zip(suggestions) {
3021 let indent_size = if let Some(suggestion) = suggestion {
3022 result
3023 .get(&suggestion.basis_row)
3024 .copied()
3025 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3026 .with_delta(suggestion.delta, single_indent_size)
3027 } else {
3028 self.indent_size_for_line(row)
3029 };
3030
3031 result.insert(row, indent_size);
3032 }
3033 }
3034
3035 result
3036 }
3037
3038 fn suggest_autoindents(
3039 &self,
3040 row_range: Range<u32>,
3041 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3042 let config = &self.language.as_ref()?.config;
3043 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3044
3045 #[derive(Debug, Clone)]
3046 struct StartPosition {
3047 start: Point,
3048 suffix: SharedString,
3049 }
3050
3051 // Find the suggested indentation ranges based on the syntax tree.
3052 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3053 let end = Point::new(row_range.end, 0);
3054 let range = (start..end).to_offset(&self.text);
3055 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3056 Some(&grammar.indents_config.as_ref()?.query)
3057 });
3058 let indent_configs = matches
3059 .grammars()
3060 .iter()
3061 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3062 .collect::<Vec<_>>();
3063
3064 let mut indent_ranges = Vec::<Range<Point>>::new();
3065 let mut start_positions = Vec::<StartPosition>::new();
3066 let mut outdent_positions = Vec::<Point>::new();
3067 while let Some(mat) = matches.peek() {
3068 let mut start: Option<Point> = None;
3069 let mut end: Option<Point> = None;
3070
3071 let config = indent_configs[mat.grammar_index];
3072 for capture in mat.captures {
3073 if capture.index == config.indent_capture_ix {
3074 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3075 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3076 } else if Some(capture.index) == config.start_capture_ix {
3077 start = Some(Point::from_ts_point(capture.node.end_position()));
3078 } else if Some(capture.index) == config.end_capture_ix {
3079 end = Some(Point::from_ts_point(capture.node.start_position()));
3080 } else if Some(capture.index) == config.outdent_capture_ix {
3081 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3082 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3083 start_positions.push(StartPosition {
3084 start: Point::from_ts_point(capture.node.start_position()),
3085 suffix: suffix.clone(),
3086 });
3087 }
3088 }
3089
3090 matches.advance();
3091 if let Some((start, end)) = start.zip(end) {
3092 if start.row == end.row {
3093 continue;
3094 }
3095 let range = start..end;
3096 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3097 Err(ix) => indent_ranges.insert(ix, range),
3098 Ok(ix) => {
3099 let prev_range = &mut indent_ranges[ix];
3100 prev_range.end = prev_range.end.max(range.end);
3101 }
3102 }
3103 }
3104 }
3105
3106 let mut error_ranges = Vec::<Range<Point>>::new();
3107 let mut matches = self
3108 .syntax
3109 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3110 while let Some(mat) = matches.peek() {
3111 let node = mat.captures[0].node;
3112 let start = Point::from_ts_point(node.start_position());
3113 let end = Point::from_ts_point(node.end_position());
3114 let range = start..end;
3115 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3116 Ok(ix) | Err(ix) => ix,
3117 };
3118 let mut end_ix = ix;
3119 while let Some(existing_range) = error_ranges.get(end_ix) {
3120 if existing_range.end < end {
3121 end_ix += 1;
3122 } else {
3123 break;
3124 }
3125 }
3126 error_ranges.splice(ix..end_ix, [range]);
3127 matches.advance();
3128 }
3129
3130 outdent_positions.sort();
3131 for outdent_position in outdent_positions {
3132 // find the innermost indent range containing this outdent_position
3133 // set its end to the outdent position
3134 if let Some(range_to_truncate) = indent_ranges
3135 .iter_mut()
3136 .filter(|indent_range| indent_range.contains(&outdent_position))
3137 .next_back()
3138 {
3139 range_to_truncate.end = outdent_position;
3140 }
3141 }
3142
3143 start_positions.sort_by_key(|b| b.start);
3144
3145 // Find the suggested indentation increases and decreases based on regexes.
3146 let mut regex_outdent_map = HashMap::default();
3147 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3148 let mut start_positions_iter = start_positions.iter().peekable();
3149
3150 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3151 self.for_each_line(
3152 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3153 ..Point::new(row_range.end, 0),
3154 |row, line| {
3155 if config
3156 .decrease_indent_pattern
3157 .as_ref()
3158 .is_some_and(|regex| regex.is_match(line))
3159 {
3160 indent_change_rows.push((row, Ordering::Less));
3161 }
3162 if config
3163 .increase_indent_pattern
3164 .as_ref()
3165 .is_some_and(|regex| regex.is_match(line))
3166 {
3167 indent_change_rows.push((row + 1, Ordering::Greater));
3168 }
3169 while let Some(pos) = start_positions_iter.peek() {
3170 if pos.start.row < row {
3171 let pos = start_positions_iter.next().unwrap();
3172 last_seen_suffix
3173 .entry(pos.suffix.to_string())
3174 .or_default()
3175 .push(pos.start);
3176 } else {
3177 break;
3178 }
3179 }
3180 for rule in &config.decrease_indent_patterns {
3181 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3182 let row_start_column = self.indent_size_for_line(row).len;
3183 let basis_row = rule
3184 .valid_after
3185 .iter()
3186 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3187 .flatten()
3188 .filter(|start_point| start_point.column <= row_start_column)
3189 .max_by_key(|start_point| start_point.row);
3190 if let Some(outdent_to_row) = basis_row {
3191 regex_outdent_map.insert(row, outdent_to_row.row);
3192 }
3193 break;
3194 }
3195 }
3196 },
3197 );
3198
3199 let mut indent_changes = indent_change_rows.into_iter().peekable();
3200 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3201 prev_non_blank_row.unwrap_or(0)
3202 } else {
3203 row_range.start.saturating_sub(1)
3204 };
3205
3206 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3207 Some(row_range.map(move |row| {
3208 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3209
3210 let mut indent_from_prev_row = false;
3211 let mut outdent_from_prev_row = false;
3212 let mut outdent_to_row = u32::MAX;
3213 let mut from_regex = false;
3214
3215 while let Some((indent_row, delta)) = indent_changes.peek() {
3216 match indent_row.cmp(&row) {
3217 Ordering::Equal => match delta {
3218 Ordering::Less => {
3219 from_regex = true;
3220 outdent_from_prev_row = true
3221 }
3222 Ordering::Greater => {
3223 indent_from_prev_row = true;
3224 from_regex = true
3225 }
3226 _ => {}
3227 },
3228
3229 Ordering::Greater => break,
3230 Ordering::Less => {}
3231 }
3232
3233 indent_changes.next();
3234 }
3235
3236 for range in &indent_ranges {
3237 if range.start.row >= row {
3238 break;
3239 }
3240 if range.start.row == prev_row && range.end > row_start {
3241 indent_from_prev_row = true;
3242 }
3243 if range.end > prev_row_start && range.end <= row_start {
3244 outdent_to_row = outdent_to_row.min(range.start.row);
3245 }
3246 }
3247
3248 if let Some(basis_row) = regex_outdent_map.get(&row) {
3249 indent_from_prev_row = false;
3250 outdent_to_row = *basis_row;
3251 from_regex = true;
3252 }
3253
3254 let within_error = error_ranges
3255 .iter()
3256 .any(|e| e.start.row < row && e.end > row_start);
3257
3258 let suggestion = if outdent_to_row == prev_row
3259 || (outdent_from_prev_row && indent_from_prev_row)
3260 {
3261 Some(IndentSuggestion {
3262 basis_row: prev_row,
3263 delta: Ordering::Equal,
3264 within_error: within_error && !from_regex,
3265 })
3266 } else if indent_from_prev_row {
3267 Some(IndentSuggestion {
3268 basis_row: prev_row,
3269 delta: Ordering::Greater,
3270 within_error: within_error && !from_regex,
3271 })
3272 } else if outdent_to_row < prev_row {
3273 Some(IndentSuggestion {
3274 basis_row: outdent_to_row,
3275 delta: Ordering::Equal,
3276 within_error: within_error && !from_regex,
3277 })
3278 } else if outdent_from_prev_row {
3279 Some(IndentSuggestion {
3280 basis_row: prev_row,
3281 delta: Ordering::Less,
3282 within_error: within_error && !from_regex,
3283 })
3284 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3285 {
3286 Some(IndentSuggestion {
3287 basis_row: prev_row,
3288 delta: Ordering::Equal,
3289 within_error: within_error && !from_regex,
3290 })
3291 } else {
3292 None
3293 };
3294
3295 prev_row = row;
3296 prev_row_start = row_start;
3297 suggestion
3298 }))
3299 }
3300
3301 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3302 while row > 0 {
3303 row -= 1;
3304 if !self.is_line_blank(row) {
3305 return Some(row);
3306 }
3307 }
3308 None
3309 }
3310
3311 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3312 let captures = self.syntax.captures(range, &self.text, |grammar| {
3313 grammar
3314 .highlights_config
3315 .as_ref()
3316 .map(|config| &config.query)
3317 });
3318 let highlight_maps = captures
3319 .grammars()
3320 .iter()
3321 .map(|grammar| grammar.highlight_map())
3322 .collect();
3323 (captures, highlight_maps)
3324 }
3325
3326 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3327 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3328 /// returned in chunks where each chunk has a single syntax highlighting style and
3329 /// diagnostic status.
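///
/// A sketch of reassembling the text of the first chunk span; the `text` field
/// on each chunk is an assumption of this example rather than something shown
/// in this excerpt:
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len().min(100), true) {
///     text.push_str(chunk.text);
/// }
/// ```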
3330 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3331 let range = range.start.to_offset(self)..range.end.to_offset(self);
3332
3333 let mut syntax = None;
3334 if language_aware {
3335 syntax = Some(self.get_highlights(range.clone()));
3336 }
3337 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3338 let diagnostics = language_aware;
3339 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3340 }
3341
3342 pub fn highlighted_text_for_range<T: ToOffset>(
3343 &self,
3344 range: Range<T>,
3345 override_style: Option<HighlightStyle>,
3346 syntax_theme: &SyntaxTheme,
3347 ) -> HighlightedText {
3348 HighlightedText::from_buffer_range(
3349 range,
3350 &self.text,
3351 &self.syntax,
3352 override_style,
3353 syntax_theme,
3354 )
3355 }
3356
3357 /// Invokes the given callback for each line of text in the given range of the buffer.
3358 /// Uses a callback to avoid allocating a string for each line.
3359 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3360 let mut line = String::new();
3361 let mut row = range.start.row;
3362 for chunk in self
3363 .as_rope()
3364 .chunks_in_range(range.to_offset(self))
3365 .chain(["\n"])
3366 {
3367 for (newline_ix, text) in chunk.split('\n').enumerate() {
3368 if newline_ix > 0 {
3369 callback(row, &line);
3370 row += 1;
3371 line.clear();
3372 }
3373 line.push_str(text);
3374 }
3375 }
3376 }
3377
3378 /// Iterates over every [`SyntaxLayer`] in the buffer.
3379 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3380 self.syntax_layers_for_range(0..self.len(), true)
3381 }
3382
3383 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3384 let offset = position.to_offset(self);
3385 self.syntax_layers_for_range(offset..offset, false)
3386 .filter(|l| l.node().end_byte() > offset)
3387 .last()
3388 }
3389
3390 pub fn syntax_layers_for_range<D: ToOffset>(
3391 &self,
3392 range: Range<D>,
3393 include_hidden: bool,
3394 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3395 self.syntax
3396 .layers_for_range(range, &self.text, include_hidden)
3397 }
3398
3399 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3400 &self,
3401 range: Range<D>,
3402 ) -> Option<SyntaxLayer<'_>> {
3403 let range = range.to_offset(self);
3404 self.syntax
3405 .layers_for_range(range, &self.text, false)
3406 .max_by(|a, b| {
3407 if a.depth != b.depth {
3408 a.depth.cmp(&b.depth)
3409 } else if a.offset.0 != b.offset.0 {
3410 a.offset.0.cmp(&b.offset.0)
3411 } else {
3412 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3413 }
3414 })
3415 }
3416
3417 /// Returns the main [`Language`].
3418 pub fn language(&self) -> Option<&Arc<Language>> {
3419 self.language.as_ref()
3420 }
3421
3422 /// Returns the [`Language`] at the given location.
3423 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3424 self.syntax_layer_at(position)
3425 .map(|info| info.language)
3426 .or(self.language.as_ref())
3427 }
3428
3429 /// Returns the settings for the language at the given location.
3430 pub fn settings_at<'a, D: ToOffset>(
3431 &'a self,
3432 position: D,
3433 cx: &'a App,
3434 ) -> Cow<'a, LanguageSettings> {
3435 language_settings(
3436 self.language_at(position).map(|l| l.name()),
3437 self.file.as_ref(),
3438 cx,
3439 )
3440 }
3441
3442 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3443 CharClassifier::new(self.language_scope_at(point))
3444 }
3445
3446 /// Returns the [`LanguageScope`] at the given location.
3447 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3448 let offset = position.to_offset(self);
3449 let mut scope = None;
3450 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3451
3452 // Use the layer that has the smallest node intersecting the given point.
3453 for layer in self
3454 .syntax
3455 .layers_for_range(offset..offset, &self.text, false)
3456 {
3457 let mut cursor = layer.node().walk();
3458
3459 let mut range = None;
3460 loop {
3461 let child_range = cursor.node().byte_range();
3462 if !child_range.contains(&offset) {
3463 break;
3464 }
3465
3466 range = Some(child_range);
3467 if cursor.goto_first_child_for_byte(offset).is_none() {
3468 break;
3469 }
3470 }
3471
3472 if let Some(range) = range
3473 && smallest_range_and_depth.as_ref().is_none_or(
3474 |(smallest_range, smallest_range_depth)| {
3475 if layer.depth > *smallest_range_depth {
3476 true
3477 } else if layer.depth == *smallest_range_depth {
3478 range.len() < smallest_range.len()
3479 } else {
3480 false
3481 }
3482 },
3483 )
3484 {
3485 smallest_range_and_depth = Some((range, layer.depth));
3486 scope = Some(LanguageScope {
3487 language: layer.language.clone(),
3488 override_id: layer.override_id(offset, &self.text),
3489 });
3490 }
3491 }
3492
3493 scope.or_else(|| {
3494 self.language.clone().map(|language| LanguageScope {
3495 language,
3496 override_id: None,
3497 })
3498 })
3499 }
3500
3501 /// Returns a tuple of the range and character kind of the word
3502 /// surrounding the given position.
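    ///
    /// A rough usage sketch (ignored doctest; assumes `snapshot` is a `BufferSnapshot` and
    /// `offset` is a byte offset within it):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```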
3503 pub fn surrounding_word<T: ToOffset>(
3504 &self,
3505 start: T,
3506 scope_context: Option<CharScopeContext>,
3507 ) -> (Range<usize>, Option<CharKind>) {
3508 let mut start = start.to_offset(self);
3509 let mut end = start;
3510 let mut next_chars = self.chars_at(start).take(128).peekable();
3511 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3512
3513 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3514 let word_kind = cmp::max(
3515 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3516 next_chars.peek().copied().map(|c| classifier.kind(c)),
3517 );
3518
3519 for ch in prev_chars {
3520 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3521 start -= ch.len_utf8();
3522 } else {
3523 break;
3524 }
3525 }
3526
3527 for ch in next_chars {
3528 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3529 end += ch.len_utf8();
3530 } else {
3531 break;
3532 }
3533 }
3534
3535 (start..end, word_kind)
3536 }
3537
3538    /// Moves the `tree_sitter::TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
3539 /// range. When `require_larger` is true, the node found must be larger than the query range.
3540 ///
3541 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3542 /// be moved to the root of the tree.
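    ///
    /// A minimal sketch of how this helper is driven (hypothetical byte range; `layer` is assumed
    /// to come from `layers_for_range`):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &(10..20), true) {
    ///     let enclosing = cursor.node(); // strictly larger than the query range
    /// }
    /// ```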
3543 fn goto_node_enclosing_range(
3544 cursor: &mut tree_sitter::TreeCursor,
3545 query_range: &Range<usize>,
3546 require_larger: bool,
3547 ) -> bool {
3548 let mut ascending = false;
3549 loop {
3550 let mut range = cursor.node().byte_range();
3551 if query_range.is_empty() {
3552                // When the query range is empty and the current node starts after it, move to the
3553                // previous sibling to find the containing node.
3554 if range.start > query_range.start {
3555 cursor.goto_previous_sibling();
3556 range = cursor.node().byte_range();
3557 }
3558 } else {
3559 // When the query range is non-empty and the current node ends exactly at the start,
3560 // move to the next sibling to find a node that extends beyond the start.
3561 if range.end == query_range.start {
3562 cursor.goto_next_sibling();
3563 range = cursor.node().byte_range();
3564 }
3565 }
3566
3567 let encloses = range.contains_inclusive(query_range)
3568 && (!require_larger || range.len() > query_range.len());
3569 if !encloses {
3570 ascending = true;
3571 if !cursor.goto_parent() {
3572 return false;
3573 }
3574 continue;
3575 } else if ascending {
3576 return true;
3577 }
3578
3579 // Descend into the current node.
3580 if cursor
3581 .goto_first_child_for_byte(query_range.start)
3582 .is_none()
3583 {
3584 return true;
3585 }
3586 }
3587 }
3588
3589 pub fn syntax_ancestor<'a, T: ToOffset>(
3590 &'a self,
3591 range: Range<T>,
3592 ) -> Option<tree_sitter::Node<'a>> {
3593 let range = range.start.to_offset(self)..range.end.to_offset(self);
3594 let mut result: Option<tree_sitter::Node<'a>> = None;
3595 for layer in self
3596 .syntax
3597 .layers_for_range(range.clone(), &self.text, true)
3598 {
3599 let mut cursor = layer.node().walk();
3600
3601 // Find the node that both contains the range and is larger than it.
3602 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3603 continue;
3604 }
3605
3606 let left_node = cursor.node();
3607 let mut layer_result = left_node;
3608
3609 // For an empty range, try to find another node immediately to the right of the range.
3610 if left_node.end_byte() == range.start {
3611 let mut right_node = None;
3612 while !cursor.goto_next_sibling() {
3613 if !cursor.goto_parent() {
3614 break;
3615 }
3616 }
3617
3618 while cursor.node().start_byte() == range.start {
3619 right_node = Some(cursor.node());
3620 if !cursor.goto_first_child() {
3621 break;
3622 }
3623 }
3624
3625 // If there is a candidate node on both sides of the (empty) range, then
3626 // decide between the two by favoring a named node over an anonymous token.
3627 // If both nodes are the same in that regard, favor the right one.
3628 if let Some(right_node) = right_node
3629 && (right_node.is_named() || !left_node.is_named())
3630 {
3631 layer_result = right_node;
3632 }
3633 }
3634
3635 if let Some(previous_result) = &result
3636 && previous_result.byte_range().len() < layer_result.byte_range().len()
3637 {
3638 continue;
3639 }
3640 result = Some(layer_result);
3641 }
3642
3643 result
3644 }
3645
3646 /// Find the previous sibling syntax node at the given range.
3647 ///
3648 /// This function locates the syntax node that precedes the node containing
3649 /// the given range. It searches hierarchically by:
3650 /// 1. Finding the node that contains the given range
3651 /// 2. Looking for the previous sibling at the same tree level
3652 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3653 ///
3654 /// Returns `None` if there is no previous sibling at any ancestor level.
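    ///
    /// A rough usage sketch (ignored doctest; `snapshot` and `selection_range` are assumed):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(selection_range.clone()) {
    ///     let prev_range = prev.byte_range();
    /// }
    /// ```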
3655 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3656 &'a self,
3657 range: Range<T>,
3658 ) -> Option<tree_sitter::Node<'a>> {
3659 let range = range.start.to_offset(self)..range.end.to_offset(self);
3660 let mut result: Option<tree_sitter::Node<'a>> = None;
3661
3662 for layer in self
3663 .syntax
3664 .layers_for_range(range.clone(), &self.text, true)
3665 {
3666 let mut cursor = layer.node().walk();
3667
3668 // Find the node that contains the range
3669 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3670 continue;
3671 }
3672
3673 // Look for the previous sibling, moving up ancestor levels if needed
3674 loop {
3675 if cursor.goto_previous_sibling() {
3676 let layer_result = cursor.node();
3677
3678 if let Some(previous_result) = &result {
3679 if previous_result.byte_range().end < layer_result.byte_range().end {
3680 continue;
3681 }
3682 }
3683 result = Some(layer_result);
3684 break;
3685 }
3686
3687 // No sibling found at this level, try moving up to parent
3688 if !cursor.goto_parent() {
3689 break;
3690 }
3691 }
3692 }
3693
3694 result
3695 }
3696
3697 /// Find the next sibling syntax node at the given range.
3698 ///
3699 /// This function locates the syntax node that follows the node containing
3700 /// the given range. It searches hierarchically by:
3701 /// 1. Finding the node that contains the given range
3702 /// 2. Looking for the next sibling at the same tree level
3703 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3704 ///
3705 /// Returns `None` if there is no next sibling at any ancestor level.
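    ///
    /// A rough usage sketch, mirroring [`Self::syntax_prev_sibling`] (ignored doctest; `snapshot`
    /// and `selection_range` are assumed):
    ///
    /// ```ignore
    /// if let Some(next) = snapshot.syntax_next_sibling(selection_range.clone()) {
    ///     println!("next sibling kind: {}", next.kind());
    /// }
    /// ```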
3706 pub fn syntax_next_sibling<'a, T: ToOffset>(
3707 &'a self,
3708 range: Range<T>,
3709 ) -> Option<tree_sitter::Node<'a>> {
3710 let range = range.start.to_offset(self)..range.end.to_offset(self);
3711 let mut result: Option<tree_sitter::Node<'a>> = None;
3712
3713 for layer in self
3714 .syntax
3715 .layers_for_range(range.clone(), &self.text, true)
3716 {
3717 let mut cursor = layer.node().walk();
3718
3719 // Find the node that contains the range
3720 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3721 continue;
3722 }
3723
3724 // Look for the next sibling, moving up ancestor levels if needed
3725 loop {
3726 if cursor.goto_next_sibling() {
3727 let layer_result = cursor.node();
3728
3729 if let Some(previous_result) = &result {
3730 if previous_result.byte_range().start > layer_result.byte_range().start {
3731 continue;
3732 }
3733 }
3734 result = Some(layer_result);
3735 break;
3736 }
3737
3738 // No sibling found at this level, try moving up to parent
3739 if !cursor.goto_parent() {
3740 break;
3741 }
3742 }
3743 }
3744
3745 result
3746 }
3747
3748    /// Returns the root syntax node within the given row.
3749 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3750 let start_offset = position.to_offset(self);
3751
3752 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3753
3754 let layer = self
3755 .syntax
3756 .layers_for_range(start_offset..start_offset, &self.text, true)
3757 .next()?;
3758
3759 let mut cursor = layer.node().walk();
3760
3761 // Descend to the first leaf that touches the start of the range.
3762 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3763 if cursor.node().end_byte() == start_offset {
3764 cursor.goto_next_sibling();
3765 }
3766 }
3767
3768 // Ascend to the root node within the same row.
3769 while cursor.goto_parent() {
3770 if cursor.node().start_position().row != row {
3771 break;
3772 }
3773 }
3774
3775 Some(cursor.node())
3776 }
3777
3778 /// Returns the outline for the buffer.
3779 ///
3780 /// This method allows passing an optional [`SyntaxTheme`] to
3781 /// syntax-highlight the returned symbols.
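    ///
    /// A rough usage sketch (ignored doctest; `snapshot` is assumed to be a parsed
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// // Or work with the underlying items directly:
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```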
3782 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3783 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3784 }
3785
3786 /// Returns all the symbols that contain the given position.
3787 ///
3788 /// This method allows passing an optional [`SyntaxTheme`] to
3789 /// syntax-highlight the returned symbols.
3790 pub fn symbols_containing<T: ToOffset>(
3791 &self,
3792 position: T,
3793 theme: Option<&SyntaxTheme>,
3794 ) -> Vec<OutlineItem<Anchor>> {
3795 let position = position.to_offset(self);
3796 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3797 let end = self.clip_offset(position + 1, Bias::Right);
3798 let mut items = self.outline_items_containing(start..end, false, theme);
3799 let mut prev_depth = None;
3800 items.retain(|item| {
3801 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3802 prev_depth = Some(item.depth);
3803 result
3804 });
3805 items
3806 }
3807
3808 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3809 let range = range.to_offset(self);
3810 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3811 grammar.outline_config.as_ref().map(|c| &c.query)
3812 });
3813 let configs = matches
3814 .grammars()
3815 .iter()
3816 .map(|g| g.outline_config.as_ref().unwrap())
3817 .collect::<Vec<_>>();
3818
3819 while let Some(mat) = matches.peek() {
3820 let config = &configs[mat.grammar_index];
3821 let containing_item_node = maybe!({
3822 let item_node = mat.captures.iter().find_map(|cap| {
3823 if cap.index == config.item_capture_ix {
3824 Some(cap.node)
3825 } else {
3826 None
3827 }
3828 })?;
3829
3830 let item_byte_range = item_node.byte_range();
3831 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3832 None
3833 } else {
3834 Some(item_node)
3835 }
3836 });
3837
3838 if let Some(item_node) = containing_item_node {
3839 return Some(
3840 Point::from_ts_point(item_node.start_position())
3841 ..Point::from_ts_point(item_node.end_position()),
3842 );
3843 }
3844
3845 matches.advance();
3846 }
3847 None
3848 }
3849
3850 pub fn outline_items_containing<T: ToOffset>(
3851 &self,
3852 range: Range<T>,
3853 include_extra_context: bool,
3854 theme: Option<&SyntaxTheme>,
3855 ) -> Vec<OutlineItem<Anchor>> {
3856 self.outline_items_containing_internal(
3857 range,
3858 include_extra_context,
3859 theme,
3860 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3861 )
3862 }
3863
3864 pub fn outline_items_as_points_containing<T: ToOffset>(
3865 &self,
3866 range: Range<T>,
3867 include_extra_context: bool,
3868 theme: Option<&SyntaxTheme>,
3869 ) -> Vec<OutlineItem<Point>> {
3870 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3871 range
3872 })
3873 }
3874
3875 fn outline_items_containing_internal<T: ToOffset, U>(
3876 &self,
3877 range: Range<T>,
3878 include_extra_context: bool,
3879 theme: Option<&SyntaxTheme>,
3880 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3881 ) -> Vec<OutlineItem<U>> {
3882 let range = range.to_offset(self);
3883 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3884 grammar.outline_config.as_ref().map(|c| &c.query)
3885 });
3886
3887 let mut items = Vec::new();
3888 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3889 while let Some(mat) = matches.peek() {
3890 let config = matches.grammars()[mat.grammar_index]
3891 .outline_config
3892 .as_ref()
3893 .unwrap();
3894 if let Some(item) =
3895 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3896 {
3897 items.push(item);
3898 } else if let Some(capture) = mat
3899 .captures
3900 .iter()
3901 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3902 {
3903 let capture_range = capture.node.start_position()..capture.node.end_position();
3904 let mut capture_row_range =
3905 capture_range.start.row as u32..capture_range.end.row as u32;
3906 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3907 {
3908 capture_row_range.end -= 1;
3909 }
3910 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3911 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3912 last_row_range.end = capture_row_range.end;
3913 } else {
3914 annotation_row_ranges.push(capture_row_range);
3915 }
3916 } else {
3917 annotation_row_ranges.push(capture_row_range);
3918 }
3919 }
3920 matches.advance();
3921 }
3922
3923 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3924
3925 // Assign depths based on containment relationships and convert to anchors.
3926 let mut item_ends_stack = Vec::<Point>::new();
3927 let mut anchor_items = Vec::new();
3928 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3929 for item in items {
3930 while let Some(last_end) = item_ends_stack.last().copied() {
3931 if last_end < item.range.end {
3932 item_ends_stack.pop();
3933 } else {
3934 break;
3935 }
3936 }
3937
3938 let mut annotation_row_range = None;
3939 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3940 let row_preceding_item = item.range.start.row.saturating_sub(1);
3941 if next_annotation_row_range.end < row_preceding_item {
3942 annotation_row_ranges.next();
3943 } else {
3944 if next_annotation_row_range.end == row_preceding_item {
3945 annotation_row_range = Some(next_annotation_row_range.clone());
3946 annotation_row_ranges.next();
3947 }
3948 break;
3949 }
3950 }
3951
3952 anchor_items.push(OutlineItem {
3953 depth: item_ends_stack.len(),
3954 range: range_callback(self, item.range.clone()),
3955 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3956 text: item.text,
3957 highlight_ranges: item.highlight_ranges,
3958 name_ranges: item.name_ranges,
3959 body_range: item.body_range.map(|r| range_callback(self, r)),
3960 annotation_range: annotation_row_range.map(|annotation_range| {
3961 let point_range = Point::new(annotation_range.start, 0)
3962 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3963 range_callback(self, point_range)
3964 }),
3965 });
3966 item_ends_stack.push(item.range.end);
3967 }
3968
3969 anchor_items
3970 }
3971
3972 fn next_outline_item(
3973 &self,
3974 config: &OutlineConfig,
3975 mat: &SyntaxMapMatch,
3976 range: &Range<usize>,
3977 include_extra_context: bool,
3978 theme: Option<&SyntaxTheme>,
3979 ) -> Option<OutlineItem<Point>> {
3980 let item_node = mat.captures.iter().find_map(|cap| {
3981 if cap.index == config.item_capture_ix {
3982 Some(cap.node)
3983 } else {
3984 None
3985 }
3986 })?;
3987
3988 let item_byte_range = item_node.byte_range();
3989 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3990 return None;
3991 }
3992 let item_point_range = Point::from_ts_point(item_node.start_position())
3993 ..Point::from_ts_point(item_node.end_position());
3994
3995 let mut open_point = None;
3996 let mut close_point = None;
3997
3998 let mut buffer_ranges = Vec::new();
3999 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4000 let mut range = node.start_byte()..node.end_byte();
4001 let start = node.start_position();
4002 if node.end_position().row > start.row {
4003 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4004 }
4005
4006 if !range.is_empty() {
4007 buffer_ranges.push((range, node_is_name));
4008 }
4009 };
4010
4011 for capture in mat.captures {
4012 if capture.index == config.name_capture_ix {
4013 add_to_buffer_ranges(capture.node, true);
4014 } else if Some(capture.index) == config.context_capture_ix
4015 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4016 {
4017 add_to_buffer_ranges(capture.node, false);
4018 } else {
4019 if Some(capture.index) == config.open_capture_ix {
4020 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4021 } else if Some(capture.index) == config.close_capture_ix {
4022 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4023 }
4024 }
4025 }
4026
4027 if buffer_ranges.is_empty() {
4028 return None;
4029 }
4030 let source_range_for_text =
4031 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4032
4033 let mut text = String::new();
4034 let mut highlight_ranges = Vec::new();
4035 let mut name_ranges = Vec::new();
4036 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4037 let mut last_buffer_range_end = 0;
4038 for (buffer_range, is_name) in buffer_ranges {
4039 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4040 if space_added {
4041 text.push(' ');
4042 }
4043 let before_append_len = text.len();
4044 let mut offset = buffer_range.start;
4045 chunks.seek(buffer_range.clone());
4046 for mut chunk in chunks.by_ref() {
4047 if chunk.text.len() > buffer_range.end - offset {
4048 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4049 offset = buffer_range.end;
4050 } else {
4051 offset += chunk.text.len();
4052 }
4053 let style = chunk
4054 .syntax_highlight_id
4055 .zip(theme)
4056 .and_then(|(highlight, theme)| highlight.style(theme));
4057 if let Some(style) = style {
4058 let start = text.len();
4059 let end = start + chunk.text.len();
4060 highlight_ranges.push((start..end, style));
4061 }
4062 text.push_str(chunk.text);
4063 if offset >= buffer_range.end {
4064 break;
4065 }
4066 }
4067 if is_name {
4068 let after_append_len = text.len();
4069 let start = if space_added && !name_ranges.is_empty() {
4070 before_append_len - 1
4071 } else {
4072 before_append_len
4073 };
4074 name_ranges.push(start..after_append_len);
4075 }
4076 last_buffer_range_end = buffer_range.end;
4077 }
4078
4079 Some(OutlineItem {
4080 depth: 0, // We'll calculate the depth later
4081 range: item_point_range,
4082 source_range_for_text: source_range_for_text.to_point(self),
4083 text,
4084 highlight_ranges,
4085 name_ranges,
4086 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4087 annotation_range: None,
4088 })
4089 }
4090
4091 pub fn function_body_fold_ranges<T: ToOffset>(
4092 &self,
4093 within: Range<T>,
4094 ) -> impl Iterator<Item = Range<usize>> + '_ {
4095 self.text_object_ranges(within, TreeSitterOptions::default())
4096 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4097 }
4098
4099 /// For each grammar in the language, runs the provided
4100 /// [`tree_sitter::Query`] against the given range.
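    ///
    /// A rough sketch, mirroring how this method is used elsewhere in this crate (ignored
    /// doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```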
4101 pub fn matches(
4102 &self,
4103 range: Range<usize>,
4104 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4105 ) -> SyntaxMapMatches<'_> {
4106 self.syntax.matches(range, self, query)
4107 }
4108
4109 pub fn all_bracket_ranges(
4110 &self,
4111 range: Range<usize>,
4112 ) -> impl Iterator<Item = BracketMatch> + '_ {
4113 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4114 grammar.brackets_config.as_ref().map(|c| &c.query)
4115 });
4116 let configs = matches
4117 .grammars()
4118 .iter()
4119 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4120 .collect::<Vec<_>>();
4121
4122 iter::from_fn(move || {
4123 while let Some(mat) = matches.peek() {
4124 let mut open = None;
4125 let mut close = None;
4126 let config = &configs[mat.grammar_index];
4127 let pattern = &config.patterns[mat.pattern_index];
4128 for capture in mat.captures {
4129 if capture.index == config.open_capture_ix {
4130 open = Some(capture.node.byte_range());
4131 } else if capture.index == config.close_capture_ix {
4132 close = Some(capture.node.byte_range());
4133 }
4134 }
4135
4136 matches.advance();
4137
4138 let Some((open_range, close_range)) = open.zip(close) else {
4139 continue;
4140 };
4141
4142 let bracket_range = open_range.start..=close_range.end;
4143 if !bracket_range.overlaps(&range) {
4144 continue;
4145 }
4146
4147 return Some(BracketMatch {
4148 open_range,
4149 close_range,
4150 newline_only: pattern.newline_only,
4151 });
4152 }
4153 None
4154 })
4155 }
4156
4157    /// Returns bracket range pairs overlapping or adjacent to `range`.
4158 pub fn bracket_ranges<T: ToOffset>(
4159 &self,
4160 range: Range<T>,
4161 ) -> impl Iterator<Item = BracketMatch> + '_ {
4162 // Find bracket pairs that *inclusively* contain the given range.
4163 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4164 self.all_bracket_ranges(range)
4165 .filter(|pair| !pair.newline_only)
4166 }
4167
4168 pub fn debug_variables_query<T: ToOffset>(
4169 &self,
4170 range: Range<T>,
4171 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4172 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4173
4174 let mut matches = self.syntax.matches_with_options(
4175 range.clone(),
4176 &self.text,
4177 TreeSitterOptions::default(),
4178 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4179 );
4180
4181 let configs = matches
4182 .grammars()
4183 .iter()
4184 .map(|grammar| grammar.debug_variables_config.as_ref())
4185 .collect::<Vec<_>>();
4186
4187 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4188
4189 iter::from_fn(move || {
4190 loop {
4191 while let Some(capture) = captures.pop() {
4192 if capture.0.overlaps(&range) {
4193 return Some(capture);
4194 }
4195 }
4196
4197 let mat = matches.peek()?;
4198
4199 let Some(config) = configs[mat.grammar_index].as_ref() else {
4200 matches.advance();
4201 continue;
4202 };
4203
4204 for capture in mat.captures {
4205 let Some(ix) = config
4206 .objects_by_capture_ix
4207 .binary_search_by_key(&capture.index, |e| e.0)
4208 .ok()
4209 else {
4210 continue;
4211 };
4212 let text_object = config.objects_by_capture_ix[ix].1;
4213 let byte_range = capture.node.byte_range();
4214
4215 let mut found = false;
4216 for (range, existing) in captures.iter_mut() {
4217 if existing == &text_object {
4218 range.start = range.start.min(byte_range.start);
4219 range.end = range.end.max(byte_range.end);
4220 found = true;
4221 break;
4222 }
4223 }
4224
4225 if !found {
4226 captures.push((byte_range, text_object));
4227 }
4228 }
4229
4230 matches.advance();
4231 }
4232 })
4233 }
4234
4235 pub fn text_object_ranges<T: ToOffset>(
4236 &self,
4237 range: Range<T>,
4238 options: TreeSitterOptions,
4239 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4240 let range =
4241 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4242
4243 let mut matches =
4244 self.syntax
4245 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4246 grammar.text_object_config.as_ref().map(|c| &c.query)
4247 });
4248
4249 let configs = matches
4250 .grammars()
4251 .iter()
4252 .map(|grammar| grammar.text_object_config.as_ref())
4253 .collect::<Vec<_>>();
4254
4255 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4256
4257 iter::from_fn(move || {
4258 loop {
4259 while let Some(capture) = captures.pop() {
4260 if capture.0.overlaps(&range) {
4261 return Some(capture);
4262 }
4263 }
4264
4265 let mat = matches.peek()?;
4266
4267 let Some(config) = configs[mat.grammar_index].as_ref() else {
4268 matches.advance();
4269 continue;
4270 };
4271
4272 for capture in mat.captures {
4273 let Some(ix) = config
4274 .text_objects_by_capture_ix
4275 .binary_search_by_key(&capture.index, |e| e.0)
4276 .ok()
4277 else {
4278 continue;
4279 };
4280 let text_object = config.text_objects_by_capture_ix[ix].1;
4281 let byte_range = capture.node.byte_range();
4282
4283 let mut found = false;
4284 for (range, existing) in captures.iter_mut() {
4285 if existing == &text_object {
4286 range.start = range.start.min(byte_range.start);
4287 range.end = range.end.max(byte_range.end);
4288 found = true;
4289 break;
4290 }
4291 }
4292
4293 if !found {
4294 captures.push((byte_range, text_object));
4295 }
4296 }
4297
4298 matches.advance();
4299 }
4300 })
4301 }
4302
4303 /// Returns enclosing bracket ranges containing the given range
4304 pub fn enclosing_bracket_ranges<T: ToOffset>(
4305 &self,
4306 range: Range<T>,
4307 ) -> impl Iterator<Item = BracketMatch> + '_ {
4308 let range = range.start.to_offset(self)..range.end.to_offset(self);
4309
4310 self.bracket_ranges(range.clone()).filter(move |pair| {
4311 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4312 })
4313 }
4314
4315    /// Returns the smallest pair of enclosing bracket ranges containing the given range, or `None` if no bracket pair contains it.
4316    ///
4317    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
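    ///
    /// A rough usage sketch (ignored doctest; `snapshot` and a cursor `offset` are assumed):
    ///
    /// ```ignore
    /// // Keep only pairs whose open bracket does not start at the cursor itself.
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     offset..offset,
    ///     Some(&|open: Range<usize>, _close: Range<usize>| open.start != offset),
    /// );
    /// ```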
4318 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4319 &self,
4320 range: Range<T>,
4321 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4322 ) -> Option<(Range<usize>, Range<usize>)> {
4323 let range = range.start.to_offset(self)..range.end.to_offset(self);
4324
4325 // Get the ranges of the innermost pair of brackets.
4326 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4327
4328 for pair in self.enclosing_bracket_ranges(range) {
4329 if let Some(range_filter) = range_filter
4330 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4331 {
4332 continue;
4333 }
4334
4335 let len = pair.close_range.end - pair.open_range.start;
4336
4337 if let Some((existing_open, existing_close)) = &result {
4338 let existing_len = existing_close.end - existing_open.start;
4339 if len > existing_len {
4340 continue;
4341 }
4342 }
4343
4344 result = Some((pair.open_range, pair.close_range));
4345 }
4346
4347 result
4348 }
4349
4350 /// Returns anchor ranges for any matches of the redaction query.
4351 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4352 /// will be run on the relevant section of the buffer.
4353 pub fn redacted_ranges<T: ToOffset>(
4354 &self,
4355 range: Range<T>,
4356 ) -> impl Iterator<Item = Range<usize>> + '_ {
4357 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4358 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4359 grammar
4360 .redactions_config
4361 .as_ref()
4362 .map(|config| &config.query)
4363 });
4364
4365 let configs = syntax_matches
4366 .grammars()
4367 .iter()
4368 .map(|grammar| grammar.redactions_config.as_ref())
4369 .collect::<Vec<_>>();
4370
4371 iter::from_fn(move || {
4372 let redacted_range = syntax_matches
4373 .peek()
4374 .and_then(|mat| {
4375 configs[mat.grammar_index].and_then(|config| {
4376 mat.captures
4377 .iter()
4378 .find(|capture| capture.index == config.redaction_capture_ix)
4379 })
4380 })
4381 .map(|mat| mat.node.byte_range());
4382 syntax_matches.advance();
4383 redacted_range
4384 })
4385 }
4386
4387 pub fn injections_intersecting_range<T: ToOffset>(
4388 &self,
4389 range: Range<T>,
4390 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4391 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4392
4393 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4394 grammar
4395 .injection_config
4396 .as_ref()
4397 .map(|config| &config.query)
4398 });
4399
4400 let configs = syntax_matches
4401 .grammars()
4402 .iter()
4403 .map(|grammar| grammar.injection_config.as_ref())
4404 .collect::<Vec<_>>();
4405
4406 iter::from_fn(move || {
4407 let ranges = syntax_matches.peek().and_then(|mat| {
4408 let config = &configs[mat.grammar_index]?;
4409 let content_capture_range = mat.captures.iter().find_map(|capture| {
4410 if capture.index == config.content_capture_ix {
4411 Some(capture.node.byte_range())
4412 } else {
4413 None
4414 }
4415 })?;
4416 let language = self.language_at(content_capture_range.start)?;
4417 Some((content_capture_range, language))
4418 });
4419 syntax_matches.advance();
4420 ranges
4421 })
4422 }
4423
4424 pub fn runnable_ranges(
4425 &self,
4426 offset_range: Range<usize>,
4427 ) -> impl Iterator<Item = RunnableRange> + '_ {
4428 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4429 grammar.runnable_config.as_ref().map(|config| &config.query)
4430 });
4431
4432 let test_configs = syntax_matches
4433 .grammars()
4434 .iter()
4435 .map(|grammar| grammar.runnable_config.as_ref())
4436 .collect::<Vec<_>>();
4437
4438 iter::from_fn(move || {
4439 loop {
4440 let mat = syntax_matches.peek()?;
4441
4442 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4443 let mut run_range = None;
4444 let full_range = mat.captures.iter().fold(
4445 Range {
4446 start: usize::MAX,
4447 end: 0,
4448 },
4449 |mut acc, next| {
4450 let byte_range = next.node.byte_range();
4451 if acc.start > byte_range.start {
4452 acc.start = byte_range.start;
4453 }
4454 if acc.end < byte_range.end {
4455 acc.end = byte_range.end;
4456 }
4457 acc
4458 },
4459 );
4460 if full_range.start > full_range.end {
4461 // We did not find a full spanning range of this match.
4462 return None;
4463 }
4464 let extra_captures: SmallVec<[_; 1]> =
4465 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4466 test_configs
4467 .extra_captures
4468 .get(capture.index as usize)
4469 .cloned()
4470 .and_then(|tag_name| match tag_name {
4471 RunnableCapture::Named(name) => {
4472 Some((capture.node.byte_range(), name))
4473 }
4474 RunnableCapture::Run => {
4475 let _ = run_range.insert(capture.node.byte_range());
4476 None
4477 }
4478 })
4479 }));
4480 let run_range = run_range?;
4481 let tags = test_configs
4482 .query
4483 .property_settings(mat.pattern_index)
4484 .iter()
4485 .filter_map(|property| {
4486 if *property.key == *"tag" {
4487 property
4488 .value
4489 .as_ref()
4490 .map(|value| RunnableTag(value.to_string().into()))
4491 } else {
4492 None
4493 }
4494 })
4495 .collect();
4496 let extra_captures = extra_captures
4497 .into_iter()
4498 .map(|(range, name)| {
4499 (
4500 name.to_string(),
4501 self.text_for_range(range).collect::<String>(),
4502 )
4503 })
4504 .collect();
4505 // All tags should have the same range.
4506 Some(RunnableRange {
4507 run_range,
4508 full_range,
4509 runnable: Runnable {
4510 tags,
4511 language: mat.language,
4512 buffer: self.remote_id(),
4513 },
4514 extra_captures,
4515 buffer_id: self.remote_id(),
4516 })
4517 });
4518
4519 syntax_matches.advance();
4520 if test_range.is_some() {
4521                    // It's fine to short-circuit when `.peek()?` returns `None`, but a match without a run
4522                    // marker must not end this iterator, so in that case we just loop around to the next match.
4523 return test_range;
4524 }
4525 }
4526 })
4527 }
4528
4529    /// Returns the selections of remote peers (and, when `include_local` is true, of the local replica) that intersect the given range.
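    ///
    /// A rough usage sketch (ignored doctest; `snapshot` is assumed, scanning the whole buffer):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // Render `selection.start..selection.end` for this remote `replica_id`.
    ///     }
    /// }
    /// ```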
4530 #[allow(clippy::type_complexity)]
4531 pub fn selections_in_range(
4532 &self,
4533 range: Range<Anchor>,
4534 include_local: bool,
4535 ) -> impl Iterator<
4536 Item = (
4537 ReplicaId,
4538 bool,
4539 CursorShape,
4540 impl Iterator<Item = &Selection<Anchor>> + '_,
4541 ),
4542 > + '_ {
4543 self.remote_selections
4544 .iter()
4545 .filter(move |(replica_id, set)| {
4546 (include_local || **replica_id != self.text.replica_id())
4547 && !set.selections.is_empty()
4548 })
4549 .map(move |(replica_id, set)| {
4550 let start_ix = match set.selections.binary_search_by(|probe| {
4551 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4552 }) {
4553 Ok(ix) | Err(ix) => ix,
4554 };
4555 let end_ix = match set.selections.binary_search_by(|probe| {
4556 probe.start.cmp(&range.end, self).then(Ordering::Less)
4557 }) {
4558 Ok(ix) | Err(ix) => ix,
4559 };
4560
4561 (
4562 *replica_id,
4563 set.line_mode,
4564 set.cursor_shape,
4565 set.selections[start_ix..end_ix].iter(),
4566 )
4567 })
4568 }
4569
4570    /// Returns whether the buffer contains any diagnostics.
4571 pub fn has_diagnostics(&self) -> bool {
4572 !self.diagnostics.is_empty()
4573 }
4574
4575 /// Returns all the diagnostics intersecting the given range.
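    ///
    /// A rough sketch (ignored doctest; `snapshot` is assumed) that collects every error in the
    /// buffer as point ranges:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, Point>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```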
4576 pub fn diagnostics_in_range<'a, T, O>(
4577 &'a self,
4578 search_range: Range<T>,
4579 reversed: bool,
4580 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4581 where
4582 T: 'a + Clone + ToOffset,
4583 O: 'a + FromAnchor,
4584 {
4585 let mut iterators: Vec<_> = self
4586 .diagnostics
4587 .iter()
4588 .map(|(_, collection)| {
4589 collection
4590 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4591 .peekable()
4592 })
4593 .collect();
4594
4595 std::iter::from_fn(move || {
4596 let (next_ix, _) = iterators
4597 .iter_mut()
4598 .enumerate()
4599 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4600 .min_by(|(_, a), (_, b)| {
4601 let cmp = a
4602 .range
4603 .start
4604 .cmp(&b.range.start, self)
4605 // when range is equal, sort by diagnostic severity
4606 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4607 // and stabilize order with group_id
4608 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4609 if reversed { cmp.reverse() } else { cmp }
4610 })?;
4611 iterators[next_ix]
4612 .next()
4613 .map(
4614 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4615 diagnostic,
4616 range: FromAnchor::from_anchor(&range.start, self)
4617 ..FromAnchor::from_anchor(&range.end, self),
4618 },
4619 )
4620 })
4621 }
4622
4623 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4624 /// should be used instead.
4625 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4626 &self.diagnostics
4627 }
4628
4629 /// Returns all the diagnostic groups associated with the given
4630 /// language server ID. If no language server ID is provided,
4631 /// all diagnostics groups are returned.
4632 pub fn diagnostic_groups(
4633 &self,
4634 language_server_id: Option<LanguageServerId>,
4635 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4636 let mut groups = Vec::new();
4637
4638 if let Some(language_server_id) = language_server_id {
4639 if let Ok(ix) = self
4640 .diagnostics
4641 .binary_search_by_key(&language_server_id, |e| e.0)
4642 {
4643 self.diagnostics[ix]
4644 .1
4645 .groups(language_server_id, &mut groups, self);
4646 }
4647 } else {
4648 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4649 diagnostics.groups(*language_server_id, &mut groups, self);
4650 }
4651 }
4652
4653 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4654 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4655 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4656 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4657 });
4658
4659 groups
4660 }
4661
4662 /// Returns an iterator over the diagnostics for the given group.
4663 pub fn diagnostic_group<O>(
4664 &self,
4665 group_id: usize,
4666 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4667 where
4668 O: FromAnchor + 'static,
4669 {
4670 self.diagnostics
4671 .iter()
4672 .flat_map(move |(_, set)| set.group(group_id, self))
4673 }
4674
4675 /// An integer version number that accounts for all updates besides
4676 /// the buffer's text itself (which is versioned via a version vector).
4677 pub fn non_text_state_update_count(&self) -> usize {
4678 self.non_text_state_update_count
4679 }
4680
4681 /// An integer version that changes when the buffer's syntax changes.
4682 pub fn syntax_update_count(&self) -> usize {
4683 self.syntax.update_count()
4684 }
4685
4686    /// Returns a snapshot of the underlying file.
4687 pub fn file(&self) -> Option<&Arc<dyn File>> {
4688 self.file.as_ref()
4689 }
4690
4691 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4692 if let Some(file) = self.file() {
4693 if file.path().file_name().is_none() || include_root {
4694 Some(file.full_path(cx).to_string_lossy().into_owned())
4695 } else {
4696 Some(file.path().display(file.path_style(cx)).to_string())
4697 }
4698 } else {
4699 None
4700 }
4701 }
4702
4703 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4704 let query_str = query.fuzzy_contents;
4705 if query_str.is_some_and(|query| query.is_empty()) {
4706 return BTreeMap::default();
4707 }
4708
4709 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4710 language,
4711 override_id: None,
4712 }));
4713
4714 let mut query_ix = 0;
4715 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4716 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4717
4718 let mut words = BTreeMap::default();
4719 let mut current_word_start_ix = None;
4720 let mut chunk_ix = query.range.start;
4721 for chunk in self.chunks(query.range, false) {
4722 for (i, c) in chunk.text.char_indices() {
4723 let ix = chunk_ix + i;
4724 if classifier.is_word(c) {
4725 if current_word_start_ix.is_none() {
4726 current_word_start_ix = Some(ix);
4727 }
4728
4729 if let Some(query_chars) = &query_chars
4730 && query_ix < query_len
4731 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4732 {
4733 query_ix += 1;
4734 }
4735 continue;
4736 } else if let Some(word_start) = current_word_start_ix.take()
4737 && query_ix == query_len
4738 {
4739 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4740 let mut word_text = self.text_for_range(word_start..ix).peekable();
4741 let first_char = word_text
4742 .peek()
4743 .and_then(|first_chunk| first_chunk.chars().next());
4744                    // Skip empty words and "words" starting with a digit, as a heuristic to reduce useless completions.
4745 if !query.skip_digits
4746 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4747 {
4748 words.insert(word_text.collect(), word_range);
4749 }
4750 }
4751 query_ix = 0;
4752 }
4753 chunk_ix += chunk.text.len();
4754 }
4755
4756 words
4757 }
4758}
4759
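/// A query passed to [`BufferSnapshot::words_in_range`].
///
/// A rough usage sketch (ignored doctest; `snapshot` is assumed):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```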
4760pub struct WordsQuery<'a> {
4761    /// Only returns words that contain all characters of this fuzzy string.
4762 pub fuzzy_contents: Option<&'a str>,
4763 /// Skips words that start with a digit.
4764 pub skip_digits: bool,
4765    /// The buffer offset range within which to look for words.
4766 pub range: Range<usize>,
4767}
4768
4769fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4770 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4771}
4772
4773fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4774 let mut result = IndentSize::spaces(0);
4775 for c in text {
4776 let kind = match c {
4777 ' ' => IndentKind::Space,
4778 '\t' => IndentKind::Tab,
4779 _ => break,
4780 };
4781 if result.len == 0 {
4782 result.kind = kind;
4783 }
4784 result.len += 1;
4785 }
4786 result
4787}
4788
4789impl Clone for BufferSnapshot {
4790 fn clone(&self) -> Self {
4791 Self {
4792 text: self.text.clone(),
4793 syntax: self.syntax.clone(),
4794 file: self.file.clone(),
4795 remote_selections: self.remote_selections.clone(),
4796 diagnostics: self.diagnostics.clone(),
4797 language: self.language.clone(),
4798 non_text_state_update_count: self.non_text_state_update_count,
4799 }
4800 }
4801}
4802
4803impl Deref for BufferSnapshot {
4804 type Target = text::BufferSnapshot;
4805
4806 fn deref(&self) -> &Self::Target {
4807 &self.text
4808 }
4809}
4810
4811unsafe impl Send for BufferChunks<'_> {}
4812
4813impl<'a> BufferChunks<'a> {
4814 pub(crate) fn new(
4815 text: &'a Rope,
4816 range: Range<usize>,
4817 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4818 diagnostics: bool,
4819 buffer_snapshot: Option<&'a BufferSnapshot>,
4820 ) -> Self {
4821 let mut highlights = None;
4822 if let Some((captures, highlight_maps)) = syntax {
4823 highlights = Some(BufferChunkHighlights {
4824 captures,
4825 next_capture: None,
4826 stack: Default::default(),
4827 highlight_maps,
4828 })
4829 }
4830
4831 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4832 let chunks = text.chunks_in_range(range.clone());
4833
4834 let mut this = BufferChunks {
4835 range,
4836 buffer_snapshot,
4837 chunks,
4838 diagnostic_endpoints,
4839 error_depth: 0,
4840 warning_depth: 0,
4841 information_depth: 0,
4842 hint_depth: 0,
4843 unnecessary_depth: 0,
4844 underline: true,
4845 highlights,
4846 };
4847 this.initialize_diagnostic_endpoints();
4848 this
4849 }
4850
4851    /// Seeks to the given byte range in the buffer.
4852 pub fn seek(&mut self, range: Range<usize>) {
4853 let old_range = std::mem::replace(&mut self.range, range.clone());
4854 self.chunks.set_range(self.range.clone());
4855 if let Some(highlights) = self.highlights.as_mut() {
4856 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4857 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4858 highlights
4859 .stack
4860 .retain(|(end_offset, _)| *end_offset > range.start);
4861 if let Some(capture) = &highlights.next_capture
4862 && range.start >= capture.node.start_byte()
4863 {
4864 let next_capture_end = capture.node.end_byte();
4865 if range.start < next_capture_end {
4866 highlights.stack.push((
4867 next_capture_end,
4868 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4869 ));
4870 }
4871 highlights.next_capture.take();
4872 }
4873 } else if let Some(snapshot) = self.buffer_snapshot {
4874 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4875 *highlights = BufferChunkHighlights {
4876 captures,
4877 next_capture: None,
4878 stack: Default::default(),
4879 highlight_maps,
4880 };
4881 } else {
4882 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4883 // Seeking such BufferChunks is not supported.
4884 debug_assert!(
4885 false,
4886 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4887 );
4888 }
4889
4890 highlights.captures.set_byte_range(self.range.clone());
4891 self.initialize_diagnostic_endpoints();
4892 }
4893 }
4894
4895 fn initialize_diagnostic_endpoints(&mut self) {
4896 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4897 && let Some(buffer) = self.buffer_snapshot
4898 {
4899 let mut diagnostic_endpoints = Vec::new();
4900 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4901 diagnostic_endpoints.push(DiagnosticEndpoint {
4902 offset: entry.range.start,
4903 is_start: true,
4904 severity: entry.diagnostic.severity,
4905 is_unnecessary: entry.diagnostic.is_unnecessary,
4906 underline: entry.diagnostic.underline,
4907 });
4908 diagnostic_endpoints.push(DiagnosticEndpoint {
4909 offset: entry.range.end,
4910 is_start: false,
4911 severity: entry.diagnostic.severity,
4912 is_unnecessary: entry.diagnostic.is_unnecessary,
4913 underline: entry.diagnostic.underline,
4914 });
4915 }
4916 diagnostic_endpoints
4917 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4918 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4919 self.hint_depth = 0;
4920 self.error_depth = 0;
4921 self.warning_depth = 0;
4922 self.information_depth = 0;
4923 }
4924 }
4925
4926 /// The current byte offset in the buffer.
4927 pub fn offset(&self) -> usize {
4928 self.range.start
4929 }
4930
4931 pub fn range(&self) -> Range<usize> {
4932 self.range.clone()
4933 }
4934
4935 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4936 let depth = match endpoint.severity {
4937 DiagnosticSeverity::ERROR => &mut self.error_depth,
4938 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4939 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4940 DiagnosticSeverity::HINT => &mut self.hint_depth,
4941 _ => return,
4942 };
4943 if endpoint.is_start {
4944 *depth += 1;
4945 } else {
4946 *depth -= 1;
4947 }
4948
4949 if endpoint.is_unnecessary {
4950 if endpoint.is_start {
4951 self.unnecessary_depth += 1;
4952 } else {
4953 self.unnecessary_depth -= 1;
4954 }
4955 }
4956 }
4957
4958 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4959 if self.error_depth > 0 {
4960 Some(DiagnosticSeverity::ERROR)
4961 } else if self.warning_depth > 0 {
4962 Some(DiagnosticSeverity::WARNING)
4963 } else if self.information_depth > 0 {
4964 Some(DiagnosticSeverity::INFORMATION)
4965 } else if self.hint_depth > 0 {
4966 Some(DiagnosticSeverity::HINT)
4967 } else {
4968 None
4969 }
4970 }
4971
4972 fn current_code_is_unnecessary(&self) -> bool {
4973 self.unnecessary_depth > 0
4974 }
4975}
4976
4977impl<'a> Iterator for BufferChunks<'a> {
4978 type Item = Chunk<'a>;
4979
4980 fn next(&mut self) -> Option<Self::Item> {
4981 let mut next_capture_start = usize::MAX;
4982 let mut next_diagnostic_endpoint = usize::MAX;
4983
4984 if let Some(highlights) = self.highlights.as_mut() {
4985 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4986 if *parent_capture_end <= self.range.start {
4987 highlights.stack.pop();
4988 } else {
4989 break;
4990 }
4991 }
4992
4993 if highlights.next_capture.is_none() {
4994 highlights.next_capture = highlights.captures.next();
4995 }
4996
4997 while let Some(capture) = highlights.next_capture.as_ref() {
4998 if self.range.start < capture.node.start_byte() {
4999 next_capture_start = capture.node.start_byte();
5000 break;
5001 } else {
5002 let highlight_id =
5003 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5004 highlights
5005 .stack
5006 .push((capture.node.end_byte(), highlight_id));
5007 highlights.next_capture = highlights.captures.next();
5008 }
5009 }
5010 }
5011
5012 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5013 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5014 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5015 if endpoint.offset <= self.range.start {
5016 self.update_diagnostic_depths(endpoint);
5017 diagnostic_endpoints.next();
5018 self.underline = endpoint.underline;
5019 } else {
5020 next_diagnostic_endpoint = endpoint.offset;
5021 break;
5022 }
5023 }
5024 }
5025 self.diagnostic_endpoints = diagnostic_endpoints;
5026
5027 if let Some(ChunkBitmaps {
5028 text: chunk,
5029 chars: chars_map,
5030 tabs,
5031 }) = self.chunks.peek_with_bitmaps()
5032 {
5033 let chunk_start = self.range.start;
5034 let mut chunk_end = (self.chunks.offset() + chunk.len())
5035 .min(next_capture_start)
5036 .min(next_diagnostic_endpoint);
5037 let mut highlight_id = None;
5038 if let Some(highlights) = self.highlights.as_ref()
5039 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5040 {
5041 chunk_end = chunk_end.min(*parent_capture_end);
5042 highlight_id = Some(*parent_highlight_id);
5043 }
5044 let bit_start = chunk_start - self.chunks.offset();
5045 let bit_end = chunk_end - self.chunks.offset();
5046
5047 let slice = &chunk[bit_start..bit_end];
5048
5049 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5050 let tabs = (tabs >> bit_start) & mask;
5051 let chars = (chars_map >> bit_start) & mask;
5052
5053 self.range.start = chunk_end;
5054 if self.range.start == self.chunks.offset() + chunk.len() {
5055 self.chunks.next().unwrap();
5056 }
5057
5058 Some(Chunk {
5059 text: slice,
5060 syntax_highlight_id: highlight_id,
5061 underline: self.underline,
5062 diagnostic_severity: self.current_diagnostic_severity(),
5063 is_unnecessary: self.current_code_is_unnecessary(),
5064 tabs,
5065 chars,
5066 ..Chunk::default()
5067 })
5068 } else {
5069 None
5070 }
5071 }
5072}
5073
5074impl operation_queue::Operation for Operation {
5075 fn lamport_timestamp(&self) -> clock::Lamport {
5076 match self {
5077 Operation::Buffer(_) => {
5078 unreachable!("buffer operations should never be deferred at this layer")
5079 }
5080 Operation::UpdateDiagnostics {
5081 lamport_timestamp, ..
5082 }
5083 | Operation::UpdateSelections {
5084 lamport_timestamp, ..
5085 }
5086 | Operation::UpdateCompletionTriggers {
5087 lamport_timestamp, ..
5088 }
5089 | Operation::UpdateLineEnding {
5090 lamport_timestamp, ..
5091 } => *lamport_timestamp,
5092 }
5093 }
5094}
5095
5096impl Default for Diagnostic {
5097 fn default() -> Self {
5098 Self {
5099 source: Default::default(),
5100 source_kind: DiagnosticSourceKind::Other,
5101 code: None,
5102 code_description: None,
5103 severity: DiagnosticSeverity::ERROR,
5104 message: Default::default(),
5105 markdown: None,
5106 group_id: 0,
5107 is_primary: false,
5108 is_disk_based: false,
5109 is_unnecessary: false,
5110 underline: true,
5111 data: None,
5112 }
5113 }
5114}
5115
5116impl IndentSize {
5117    /// Returns an [`IndentSize`] representing the given number of spaces.
5118 pub fn spaces(len: u32) -> Self {
5119 Self {
5120 len,
5121 kind: IndentKind::Space,
5122 }
5123 }
5124
5125 /// Returns an [`IndentSize`] representing a tab.
5126 pub fn tab() -> Self {
5127 Self {
5128 len: 1,
5129 kind: IndentKind::Tab,
5130 }
5131 }
5132
5133 /// An iterator over the characters represented by this [`IndentSize`].
5134 pub fn chars(&self) -> impl Iterator<Item = char> {
5135 iter::repeat(self.char()).take(self.len as usize)
5136 }
5137
5138 /// The character representation of this [`IndentSize`].
5139 pub fn char(&self) -> char {
5140 match self.kind {
5141 IndentKind::Space => ' ',
5142 IndentKind::Tab => '\t',
5143 }
5144 }
5145
5146 /// Consumes the current [`IndentSize`] and returns a new one that has
5147 /// been shrunk or enlarged by the given size along the given direction.
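    ///
    /// A rough sketch (ignored doctest):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```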
5148 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5149 match direction {
5150 Ordering::Less => {
5151 if self.kind == size.kind && self.len >= size.len {
5152 self.len -= size.len;
5153 }
5154 }
5155 Ordering::Equal => {}
5156 Ordering::Greater => {
5157 if self.len == 0 {
5158 self = size;
5159 } else if self.kind == size.kind {
5160 self.len += size.len;
5161 }
5162 }
5163 }
5164 self
5165 }
5166
5167 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5168 match self.kind {
5169 IndentKind::Space => self.len as usize,
5170 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5171 }
5172 }
5173}
5174
5175#[cfg(any(test, feature = "test-support"))]
5176pub struct TestFile {
5177 pub path: Arc<RelPath>,
5178 pub root_name: String,
5179 pub local_root: Option<PathBuf>,
5180}
5181
5182#[cfg(any(test, feature = "test-support"))]
5183impl File for TestFile {
5184 fn path(&self) -> &Arc<RelPath> {
5185 &self.path
5186 }
5187
5188 fn full_path(&self, _: &gpui::App) -> PathBuf {
5189 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5190 }
5191
5192 fn as_local(&self) -> Option<&dyn LocalFile> {
5193 if self.local_root.is_some() {
5194 Some(self)
5195 } else {
5196 None
5197 }
5198 }
5199
5200 fn disk_state(&self) -> DiskState {
5201 unimplemented!()
5202 }
5203
5204 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5205 self.path().file_name().unwrap_or(self.root_name.as_ref())
5206 }
5207
5208 fn worktree_id(&self, _: &App) -> WorktreeId {
5209 WorktreeId::from_usize(0)
5210 }
5211
5212 fn to_proto(&self, _: &App) -> rpc::proto::File {
5213 unimplemented!()
5214 }
5215
5216 fn is_private(&self) -> bool {
5217 false
5218 }
5219
5220 fn path_style(&self, _cx: &App) -> PathStyle {
5221 PathStyle::local()
5222 }
5223}
5224
5225#[cfg(any(test, feature = "test-support"))]
5226impl LocalFile for TestFile {
5227 fn abs_path(&self, _cx: &App) -> PathBuf {
5228 PathBuf::from(self.local_root.as_ref().unwrap())
5229 .join(&self.root_name)
5230 .join(self.path.as_std_path())
5231 }
5232
5233 fn load(&self, _cx: &App) -> Task<Result<String>> {
5234 unimplemented!()
5235 }
5236
5237 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5238 unimplemented!()
5239 }
5240
5241 fn load_with_encoding(&self, _: &App, _: &'static Encoding) -> Task<Result<String>> {
5242 unimplemented!()
5243 }
5244}
5245
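/// Groups an ascending sequence of values into contiguous ranges, starting a new range whenever a
/// value is non-consecutive or the current range would exceed `max_len` values.
///
/// For example, `contiguous_ranges([1, 2, 3, 5].into_iter(), 2)` yields `1..3`, `3..4`, and `5..6`.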
5246pub(crate) fn contiguous_ranges(
5247 values: impl Iterator<Item = u32>,
5248 max_len: usize,
5249) -> impl Iterator<Item = Range<u32>> {
5250 let mut values = values;
5251 let mut current_range: Option<Range<u32>> = None;
5252 std::iter::from_fn(move || {
5253 loop {
5254 if let Some(value) = values.next() {
5255 if let Some(range) = &mut current_range
5256 && value == range.end
5257 && range.len() < max_len
5258 {
5259 range.end += 1;
5260 continue;
5261 }
5262
5263 let prev_range = current_range.clone();
5264 current_range = Some(value..(value + 1));
5265 if prev_range.is_some() {
5266 return prev_range;
5267 }
5268 } else {
5269 return current_range.take();
5270 }
5271 }
5272 })
5273}
5274
5275#[derive(Default, Debug)]
5276pub struct CharClassifier {
5277 scope: Option<LanguageScope>,
5278 scope_context: Option<CharScopeContext>,
5279 ignore_punctuation: bool,
5280}
5281
5282impl CharClassifier {
5283 pub fn new(scope: Option<LanguageScope>) -> Self {
5284 Self {
5285 scope,
5286 scope_context: None,
5287 ignore_punctuation: false,
5288 }
5289 }
5290
5291 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5292 Self {
5293 scope_context,
5294 ..self
5295 }
5296 }
5297
5298 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5299 Self {
5300 ignore_punctuation,
5301 ..self
5302 }
5303 }
5304
5305 pub fn is_whitespace(&self, c: char) -> bool {
5306 self.kind(c) == CharKind::Whitespace
5307 }
5308
5309 pub fn is_word(&self, c: char) -> bool {
5310 self.kind(c) == CharKind::Word
5311 }
5312
5313 pub fn is_punctuation(&self, c: char) -> bool {
5314 self.kind(c) == CharKind::Punctuation
5315 }
5316
5317 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5318 if c.is_alphanumeric() || c == '_' {
5319 return CharKind::Word;
5320 }
5321
5322 if let Some(scope) = &self.scope {
5323 let characters = match self.scope_context {
5324 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5325 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5326 None => scope.word_characters(),
5327 };
5328 if let Some(characters) = characters
5329 && characters.contains(&c)
5330 {
5331 return CharKind::Word;
5332 }
5333 }
5334
5335 if c.is_whitespace() {
5336 return CharKind::Whitespace;
5337 }
5338
5339 if ignore_punctuation {
5340 CharKind::Word
5341 } else {
5342 CharKind::Punctuation
5343 }
5344 }
5345
5346 pub fn kind(&self, c: char) -> CharKind {
5347 self.kind_with(c, self.ignore_punctuation)
5348 }
5349}
5350
5351/// Find all of the ranges of whitespace that occur at the ends of lines
5352/// in the given rope.
5353///
5354/// This could also be done with a regex search, but this implementation
5355/// avoids copying text.
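///
/// A rough sketch (ignored doctest):
///
/// ```ignore
/// let rope = Rope::from("a \nb\t\t\n");
/// // The trailing space after "a" and the trailing tabs after "b".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..6]);
/// ```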
5356pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5357 let mut ranges = Vec::new();
5358
5359 let mut offset = 0;
5360 let mut prev_chunk_trailing_whitespace_range = 0..0;
5361 for chunk in rope.chunks() {
5362 let mut prev_line_trailing_whitespace_range = 0..0;
5363 for (i, line) in chunk.split('\n').enumerate() {
5364 let line_end_offset = offset + line.len();
5365 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5366 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5367
5368 if i == 0 && trimmed_line_len == 0 {
5369 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5370 }
5371 if !prev_line_trailing_whitespace_range.is_empty() {
5372 ranges.push(prev_line_trailing_whitespace_range);
5373 }
5374
5375 offset = line_end_offset + 1;
5376 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5377 }
5378
5379 offset -= 1;
5380 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5381 }
5382
5383 if !prev_chunk_trailing_whitespace_range.is_empty() {
5384 ranges.push(prev_chunk_trailing_whitespace_range);
5385 }
5386
5387 ranges
5388}