1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoize calls to has_changes_since(saved_version).
126 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: Arc<std::sync::Mutex<&'static Encoding>>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
216 pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
221 /// The human-readable message (in markdown format)
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
243 /// Quick separation of diagnostics groups based by their source.
244 pub source_kind: DiagnosticSourceKind,
245 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
325 /// The buffer is in need of a reload
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
348 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
349 /// only available in some states, such as modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
374 /// Return whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376}
377
378/// The file's storage status - whether it's stored (`Present`), and if so when it was last
379/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
380/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
381/// indicator for new files.
382#[derive(Copy, Clone, Debug, PartialEq)]
383pub enum DiskState {
384 /// File created in Zed that has not been saved.
385 New,
386 /// File present on the filesystem.
387 Present { mtime: MTime },
388 /// Deleted file that was previously present.
389 Deleted,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
410
411/// The file associated with a buffer, in the case where the file is on the local disk.
412pub trait LocalFile: File {
413 /// Returns the absolute path of this file
414 fn abs_path(&self, cx: &App) -> PathBuf;
415
416 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
417 fn load(&self, cx: &App) -> Task<Result<String>>;
418
419 /// Loads the file's contents from disk.
420 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
421}
422
423/// The auto-indent behavior associated with an editing operation.
424/// For some editing operations, each affected line of text has its
425/// indentation recomputed. For other operations, the entire block
426/// of edited text is adjusted uniformly.
427#[derive(Clone, Debug)]
428pub enum AutoindentMode {
429 /// Indent each line of inserted text.
430 EachLine,
431 /// Apply the same indentation adjustment to all of the lines
432 /// in a given insertion.
433 Block {
434 /// The original indentation column of the first line of each
435 /// insertion, if it has been copied.
436 ///
437 /// Knowing this makes it possible to preserve the relative indentation
438 /// of every line in the insertion from when it was copied.
439 ///
440 /// If the original indent column is `a`, and the first line of insertion
441 /// is then auto-indented to column `b`, then every other line of
442 /// the insertion will be auto-indented to column `b - a`
443 original_indent_columns: Vec<Option<u32>>,
444 },
445}
446
447#[derive(Clone)]
448struct AutoindentRequest {
449 before_edit: BufferSnapshot,
450 entries: Vec<AutoindentRequestEntry>,
451 is_block_mode: bool,
452 ignore_empty_lines: bool,
453}
454
455#[derive(Debug, Clone)]
456struct AutoindentRequestEntry {
457 /// A range of the buffer whose indentation should be adjusted.
458 range: Range<Anchor>,
459 /// Whether or not these lines should be considered brand new, for the
460 /// purpose of auto-indent. When text is not new, its indentation will
461 /// only be adjusted if the suggested indentation level has *changed*
462 /// since the edit was made.
463 first_line_is_new: bool,
464 indent_size: IndentSize,
465 original_indent_column: Option<u32>,
466}
467
468#[derive(Debug)]
469struct IndentSuggestion {
470 basis_row: u32,
471 delta: Ordering,
472 within_error: bool,
473}
474
475struct BufferChunkHighlights<'a> {
476 captures: SyntaxMapCaptures<'a>,
477 next_capture: Option<SyntaxMapCapture<'a>>,
478 stack: Vec<(usize, HighlightId)>,
479 highlight_maps: Vec<HighlightMap>,
480}
481
482/// An iterator that yields chunks of a buffer's text, along with their
483/// syntax highlights and diagnostic status.
484pub struct BufferChunks<'a> {
485 buffer_snapshot: Option<&'a BufferSnapshot>,
486 range: Range<usize>,
487 chunks: text::Chunks<'a>,
488 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
489 error_depth: usize,
490 warning_depth: usize,
491 information_depth: usize,
492 hint_depth: usize,
493 unnecessary_depth: usize,
494 underline: bool,
495 highlights: Option<BufferChunkHighlights<'a>>,
496}
497
498/// A chunk of a buffer's text, along with its syntax highlight and
499/// diagnostic status.
500#[derive(Clone, Debug, Default)]
501pub struct Chunk<'a> {
502 /// The text of the chunk.
503 pub text: &'a str,
504 /// The syntax highlighting style of the chunk.
505 pub syntax_highlight_id: Option<HighlightId>,
506 /// The highlight style that has been applied to this chunk in
507 /// the editor.
508 pub highlight_style: Option<HighlightStyle>,
509 /// The severity of diagnostic associated with this chunk, if any.
510 pub diagnostic_severity: Option<DiagnosticSeverity>,
511 /// A bitset of which characters are tabs in this string.
512 pub tabs: u128,
513 /// Bitmap of character indices in this chunk
514 pub chars: u128,
515 /// Whether this chunk of text is marked as unnecessary.
516 pub is_unnecessary: bool,
517 /// Whether this chunk of text was originally a tab character.
518 pub is_tab: bool,
519 /// Whether this chunk of text was originally an inlay.
520 pub is_inlay: bool,
521 /// Whether to underline the corresponding text range in the editor.
522 pub underline: bool,
523}
524
525/// A set of edits to a given version of a buffer, computed asynchronously.
526#[derive(Debug)]
527pub struct Diff {
528 pub base_version: clock::Global,
529 pub line_ending: LineEnding,
530 pub edits: Vec<(Range<usize>, Arc<str>)>,
531}
532
533#[derive(Debug, Clone, Copy)]
534pub(crate) struct DiagnosticEndpoint {
535 offset: usize,
536 is_start: bool,
537 underline: bool,
538 severity: DiagnosticSeverity,
539 is_unnecessary: bool,
540}
541
542/// A class of characters, used for characterizing a run of text.
543#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
544pub enum CharKind {
545 /// Whitespace.
546 Whitespace,
547 /// Punctuation.
548 Punctuation,
549 /// Word.
550 Word,
551}
552
553/// Context for character classification within a specific scope.
554#[derive(Copy, Clone, Eq, PartialEq, Debug)]
555pub enum CharScopeContext {
556 /// Character classification for completion queries.
557 ///
558 /// This context treats certain characters as word constituents that would
559 /// normally be considered punctuation, such as '-' in Tailwind classes
560 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
561 Completion,
562 /// Character classification for linked edits.
563 ///
564 /// This context handles characters that should be treated as part of
565 /// identifiers during linked editing operations, such as '.' in JSX
566 /// component names like `<Animated.View>`.
567 LinkedEdit,
568}
569
570/// A runnable is a set of data about a region that could be resolved into a task
571pub struct Runnable {
572 pub tags: SmallVec<[RunnableTag; 1]>,
573 pub language: Arc<Language>,
574 pub buffer: BufferId,
575}
576
577#[derive(Default, Clone, Debug)]
578pub struct HighlightedText {
579 pub text: SharedString,
580 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
581}
582
583#[derive(Default, Debug)]
584struct HighlightedTextBuilder {
585 pub text: String,
586 highlights: Vec<(Range<usize>, HighlightStyle)>,
587}
588
589impl HighlightedText {
590 pub fn from_buffer_range<T: ToOffset>(
591 range: Range<T>,
592 snapshot: &text::BufferSnapshot,
593 syntax_snapshot: &SyntaxSnapshot,
594 override_style: Option<HighlightStyle>,
595 syntax_theme: &SyntaxTheme,
596 ) -> Self {
597 let mut highlighted_text = HighlightedTextBuilder::default();
598 highlighted_text.add_text_from_buffer_range(
599 range,
600 snapshot,
601 syntax_snapshot,
602 override_style,
603 syntax_theme,
604 );
605 highlighted_text.build()
606 }
607
608 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
609 gpui::StyledText::new(self.text.clone())
610 .with_default_highlights(default_style, self.highlights.iter().cloned())
611 }
612
613 /// Returns the first line without leading whitespace unless highlighted
614 /// and a boolean indicating if there are more lines after
615 pub fn first_line_preview(self) -> (Self, bool) {
616 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
617 let first_line = &self.text[..newline_ix];
618
619 // Trim leading whitespace, unless an edit starts prior to it.
620 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
621 if let Some((first_highlight_range, _)) = self.highlights.first() {
622 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
623 }
624
625 let preview_text = &first_line[preview_start_ix..];
626 let preview_highlights = self
627 .highlights
628 .into_iter()
629 .skip_while(|(range, _)| range.end <= preview_start_ix)
630 .take_while(|(range, _)| range.start < newline_ix)
631 .filter_map(|(mut range, highlight)| {
632 range.start = range.start.saturating_sub(preview_start_ix);
633 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
634 if range.is_empty() {
635 None
636 } else {
637 Some((range, highlight))
638 }
639 });
640
641 let preview = Self {
642 text: SharedString::new(preview_text),
643 highlights: preview_highlights.collect(),
644 };
645
646 (preview, self.text.len() > newline_ix)
647 }
648}
649
650impl HighlightedTextBuilder {
651 pub fn build(self) -> HighlightedText {
652 HighlightedText {
653 text: self.text.into(),
654 highlights: self.highlights,
655 }
656 }
657
658 pub fn add_text_from_buffer_range<T: ToOffset>(
659 &mut self,
660 range: Range<T>,
661 snapshot: &text::BufferSnapshot,
662 syntax_snapshot: &SyntaxSnapshot,
663 override_style: Option<HighlightStyle>,
664 syntax_theme: &SyntaxTheme,
665 ) {
666 let range = range.to_offset(snapshot);
667 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
668 let start = self.text.len();
669 self.text.push_str(chunk.text);
670 let end = self.text.len();
671
672 if let Some(highlight_style) = chunk
673 .syntax_highlight_id
674 .and_then(|id| id.style(syntax_theme))
675 {
676 let highlight_style = override_style.map_or(highlight_style, |override_style| {
677 highlight_style.highlight(override_style)
678 });
679 self.highlights.push((start..end, highlight_style));
680 } else if let Some(override_style) = override_style {
681 self.highlights.push((start..end, override_style));
682 }
683 }
684 }
685
686 fn highlighted_chunks<'a>(
687 range: Range<usize>,
688 snapshot: &'a text::BufferSnapshot,
689 syntax_snapshot: &'a SyntaxSnapshot,
690 ) -> BufferChunks<'a> {
691 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
692 grammar
693 .highlights_config
694 .as_ref()
695 .map(|config| &config.query)
696 });
697
698 let highlight_maps = captures
699 .grammars()
700 .iter()
701 .map(|grammar| grammar.highlight_map())
702 .collect();
703
704 BufferChunks::new(
705 snapshot.as_rope(),
706 range,
707 Some((captures, highlight_maps)),
708 false,
709 None,
710 )
711 }
712}
713
714#[derive(Clone)]
715pub struct EditPreview {
716 old_snapshot: text::BufferSnapshot,
717 applied_edits_snapshot: text::BufferSnapshot,
718 syntax_snapshot: SyntaxSnapshot,
719}
720
721impl EditPreview {
722 pub fn highlight_edits(
723 &self,
724 current_snapshot: &BufferSnapshot,
725 edits: &[(Range<Anchor>, String)],
726 include_deletions: bool,
727 cx: &App,
728 ) -> HighlightedText {
729 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
730 return HighlightedText::default();
731 };
732
733 let mut highlighted_text = HighlightedTextBuilder::default();
734
735 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
736
737 let insertion_highlight_style = HighlightStyle {
738 background_color: Some(cx.theme().status().created_background),
739 ..Default::default()
740 };
741 let deletion_highlight_style = HighlightStyle {
742 background_color: Some(cx.theme().status().deleted_background),
743 ..Default::default()
744 };
745 let syntax_theme = cx.theme().syntax();
746
747 for (range, edit_text) in edits {
748 let edit_new_end_in_preview_snapshot = range
749 .end
750 .bias_right(&self.old_snapshot)
751 .to_offset(&self.applied_edits_snapshot);
752 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
753
754 let unchanged_range_in_preview_snapshot =
755 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
756 if !unchanged_range_in_preview_snapshot.is_empty() {
757 highlighted_text.add_text_from_buffer_range(
758 unchanged_range_in_preview_snapshot,
759 &self.applied_edits_snapshot,
760 &self.syntax_snapshot,
761 None,
762 syntax_theme,
763 );
764 }
765
766 let range_in_current_snapshot = range.to_offset(current_snapshot);
767 if include_deletions && !range_in_current_snapshot.is_empty() {
768 highlighted_text.add_text_from_buffer_range(
769 range_in_current_snapshot,
770 ¤t_snapshot.text,
771 ¤t_snapshot.syntax,
772 Some(deletion_highlight_style),
773 syntax_theme,
774 );
775 }
776
777 if !edit_text.is_empty() {
778 highlighted_text.add_text_from_buffer_range(
779 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
780 &self.applied_edits_snapshot,
781 &self.syntax_snapshot,
782 Some(insertion_highlight_style),
783 syntax_theme,
784 );
785 }
786
787 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
788 }
789
790 highlighted_text.add_text_from_buffer_range(
791 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
792 &self.applied_edits_snapshot,
793 &self.syntax_snapshot,
794 None,
795 syntax_theme,
796 );
797
798 highlighted_text.build()
799 }
800
801 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
802 let (first, _) = edits.first()?;
803 let (last, _) = edits.last()?;
804
805 let start = first
806 .start
807 .bias_left(&self.old_snapshot)
808 .to_point(&self.applied_edits_snapshot);
809 let end = last
810 .end
811 .bias_right(&self.old_snapshot)
812 .to_point(&self.applied_edits_snapshot);
813
814 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
815 let range = Point::new(start.row, 0)
816 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
817
818 Some(range.to_offset(&self.applied_edits_snapshot))
819 }
820}
821
822#[derive(Clone, Debug, PartialEq, Eq)]
823pub struct BracketMatch {
824 pub open_range: Range<usize>,
825 pub close_range: Range<usize>,
826 pub newline_only: bool,
827}
828
829impl Buffer {
830 /// Create a new buffer with the given base text.
831 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
832 Self::build(
833 TextBuffer::new(
834 ReplicaId::LOCAL,
835 cx.entity_id().as_non_zero_u64().into(),
836 base_text.into(),
837 &cx.background_executor(),
838 ),
839 None,
840 Capability::ReadWrite,
841 )
842 }
843
844 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
845 pub fn local_normalized(
846 base_text_normalized: Rope,
847 line_ending: LineEnding,
848 cx: &Context<Self>,
849 ) -> Self {
850 Self::build(
851 TextBuffer::new_normalized(
852 ReplicaId::LOCAL,
853 cx.entity_id().as_non_zero_u64().into(),
854 line_ending,
855 base_text_normalized,
856 ),
857 None,
858 Capability::ReadWrite,
859 )
860 }
861
862 /// Create a new buffer that is a replica of a remote buffer.
863 pub fn remote(
864 remote_id: BufferId,
865 replica_id: ReplicaId,
866 capability: Capability,
867 base_text: impl Into<String>,
868 cx: &BackgroundExecutor,
869 ) -> Self {
870 Self::build(
871 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
872 None,
873 capability,
874 )
875 }
876
877 /// Create a new buffer that is a replica of a remote buffer, populating its
878 /// state from the given protobuf message.
879 pub fn from_proto(
880 replica_id: ReplicaId,
881 capability: Capability,
882 message: proto::BufferState,
883 file: Option<Arc<dyn File>>,
884 cx: &BackgroundExecutor,
885 ) -> Result<Self> {
886 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
887 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
888 let mut this = Self::build(buffer, file, capability);
889 this.text.set_line_ending(proto::deserialize_line_ending(
890 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
891 ));
892 this.saved_version = proto::deserialize_version(&message.saved_version);
893 this.saved_mtime = message.saved_mtime.map(|time| time.into());
894 Ok(this)
895 }
896
897 /// Serialize the buffer's state to a protobuf message.
898 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
899 proto::BufferState {
900 id: self.remote_id().into(),
901 file: self.file.as_ref().map(|f| f.to_proto(cx)),
902 base_text: self.base_text().to_string(),
903 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
904 saved_version: proto::serialize_version(&self.saved_version),
905 saved_mtime: self.saved_mtime.map(|time| time.into()),
906 }
907 }
908
909 /// Serialize as protobufs all of the changes to the buffer since the given version.
910 pub fn serialize_ops(
911 &self,
912 since: Option<clock::Global>,
913 cx: &App,
914 ) -> Task<Vec<proto::Operation>> {
915 let mut operations = Vec::new();
916 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
917
918 operations.extend(self.remote_selections.iter().map(|(_, set)| {
919 proto::serialize_operation(&Operation::UpdateSelections {
920 selections: set.selections.clone(),
921 lamport_timestamp: set.lamport_timestamp,
922 line_mode: set.line_mode,
923 cursor_shape: set.cursor_shape,
924 })
925 }));
926
927 for (server_id, diagnostics) in &self.diagnostics {
928 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
929 lamport_timestamp: self.diagnostics_timestamp,
930 server_id: *server_id,
931 diagnostics: diagnostics.iter().cloned().collect(),
932 }));
933 }
934
935 for (server_id, completions) in &self.completion_triggers_per_language_server {
936 operations.push(proto::serialize_operation(
937 &Operation::UpdateCompletionTriggers {
938 triggers: completions.iter().cloned().collect(),
939 lamport_timestamp: self.completion_triggers_timestamp,
940 server_id: *server_id,
941 },
942 ));
943 }
944
945 let text_operations = self.text.operations().clone();
946 cx.background_spawn(async move {
947 let since = since.unwrap_or_default();
948 operations.extend(
949 text_operations
950 .iter()
951 .filter(|(_, op)| !since.observed(op.timestamp()))
952 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
953 );
954 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
955 operations
956 })
957 }
958
959 /// Assign a language to the buffer, returning the buffer.
960 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
961 self.set_language(Some(language), cx);
962 self
963 }
964
965 /// Returns the [`Capability`] of this buffer.
966 pub fn capability(&self) -> Capability {
967 self.capability
968 }
969
970 /// Whether this buffer can only be read.
971 pub fn read_only(&self) -> bool {
972 self.capability == Capability::ReadOnly
973 }
974
975 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
976 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
977 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
978 let snapshot = buffer.snapshot();
979 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
980 Self {
981 saved_mtime,
982 saved_version: buffer.version(),
983 preview_version: buffer.version(),
984 reload_task: None,
985 transaction_depth: 0,
986 was_dirty_before_starting_transaction: None,
987 has_unsaved_edits: Cell::new((buffer.version(), false)),
988 text: buffer,
989 branch_state: None,
990 file,
991 capability,
992 syntax_map,
993 reparse: None,
994 non_text_state_update_count: 0,
995 sync_parse_timeout: Duration::from_millis(1),
996 parse_status: watch::channel(ParseStatus::Idle),
997 autoindent_requests: Default::default(),
998 wait_for_autoindent_txs: Default::default(),
999 pending_autoindent: Default::default(),
1000 language: None,
1001 remote_selections: Default::default(),
1002 diagnostics: Default::default(),
1003 diagnostics_timestamp: Lamport::MIN,
1004 completion_triggers: Default::default(),
1005 completion_triggers_per_language_server: Default::default(),
1006 completion_triggers_timestamp: Lamport::MIN,
1007 deferred_ops: OperationQueue::new(),
1008 has_conflict: false,
1009 change_bits: Default::default(),
1010 _subscriptions: Vec::new(),
1011 encoding: Arc::new(std::sync::Mutex::new(encoding_rs::UTF_8)),
1012 }
1013 }
1014
1015 pub fn build_snapshot(
1016 text: Rope,
1017 language: Option<Arc<Language>>,
1018 language_registry: Option<Arc<LanguageRegistry>>,
1019 cx: &mut App,
1020 ) -> impl Future<Output = BufferSnapshot> + use<> {
1021 let entity_id = cx.reserve_entity::<Self>().entity_id();
1022 let buffer_id = entity_id.as_non_zero_u64().into();
1023 async move {
1024 let text =
1025 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1026 .snapshot();
1027 let mut syntax = SyntaxMap::new(&text).snapshot();
1028 if let Some(language) = language.clone() {
1029 let language_registry = language_registry.clone();
1030 syntax.reparse(&text, language_registry, language);
1031 }
1032 BufferSnapshot {
1033 text,
1034 syntax,
1035 file: None,
1036 diagnostics: Default::default(),
1037 remote_selections: Default::default(),
1038 language,
1039 non_text_state_update_count: 0,
1040 }
1041 }
1042 }
1043
1044 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1045 let entity_id = cx.reserve_entity::<Self>().entity_id();
1046 let buffer_id = entity_id.as_non_zero_u64().into();
1047 let text = TextBuffer::new_normalized(
1048 ReplicaId::LOCAL,
1049 buffer_id,
1050 Default::default(),
1051 Rope::new(),
1052 )
1053 .snapshot();
1054 let syntax = SyntaxMap::new(&text).snapshot();
1055 BufferSnapshot {
1056 text,
1057 syntax,
1058 file: None,
1059 diagnostics: Default::default(),
1060 remote_selections: Default::default(),
1061 language: None,
1062 non_text_state_update_count: 0,
1063 }
1064 }
1065
1066 #[cfg(any(test, feature = "test-support"))]
1067 pub fn build_snapshot_sync(
1068 text: Rope,
1069 language: Option<Arc<Language>>,
1070 language_registry: Option<Arc<LanguageRegistry>>,
1071 cx: &mut App,
1072 ) -> BufferSnapshot {
1073 let entity_id = cx.reserve_entity::<Self>().entity_id();
1074 let buffer_id = entity_id.as_non_zero_u64().into();
1075 let text =
1076 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1077 .snapshot();
1078 let mut syntax = SyntaxMap::new(&text).snapshot();
1079 if let Some(language) = language.clone() {
1080 syntax.reparse(&text, language_registry, language);
1081 }
1082 BufferSnapshot {
1083 text,
1084 syntax,
1085 file: None,
1086 diagnostics: Default::default(),
1087 remote_selections: Default::default(),
1088 language,
1089 non_text_state_update_count: 0,
1090 }
1091 }
1092
1093 /// Retrieve a snapshot of the buffer's current state. This is computationally
1094 /// cheap, and allows reading from the buffer on a background thread.
1095 pub fn snapshot(&self) -> BufferSnapshot {
1096 let text = self.text.snapshot();
1097 let mut syntax_map = self.syntax_map.lock();
1098 syntax_map.interpolate(&text);
1099 let syntax = syntax_map.snapshot();
1100
1101 BufferSnapshot {
1102 text,
1103 syntax,
1104 file: self.file.clone(),
1105 remote_selections: self.remote_selections.clone(),
1106 diagnostics: self.diagnostics.clone(),
1107 language: self.language.clone(),
1108 non_text_state_update_count: self.non_text_state_update_count,
1109 }
1110 }
1111
1112 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1113 let this = cx.entity();
1114 cx.new(|cx| {
1115 let mut branch = Self {
1116 branch_state: Some(BufferBranchState {
1117 base_buffer: this.clone(),
1118 merged_operations: Default::default(),
1119 }),
1120 language: self.language.clone(),
1121 has_conflict: self.has_conflict,
1122 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1123 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1124 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1125 };
1126 if let Some(language_registry) = self.language_registry() {
1127 branch.set_language_registry(language_registry);
1128 }
1129
1130 // Reparse the branch buffer so that we get syntax highlighting immediately.
1131 branch.reparse(cx);
1132
1133 branch
1134 })
1135 }
1136
1137 pub fn preview_edits(
1138 &self,
1139 edits: Arc<[(Range<Anchor>, String)]>,
1140 cx: &App,
1141 ) -> Task<EditPreview> {
1142 let registry = self.language_registry();
1143 let language = self.language().cloned();
1144 let old_snapshot = self.text.snapshot();
1145 let mut branch_buffer = self.text.branch();
1146 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1147 let executor = cx.background_executor().clone();
1148 cx.background_spawn(async move {
1149 if !edits.is_empty() {
1150 if let Some(language) = language.clone() {
1151 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1152 }
1153
1154 branch_buffer.edit(edits.iter().cloned(), &executor);
1155 let snapshot = branch_buffer.snapshot();
1156 syntax_snapshot.interpolate(&snapshot);
1157
1158 if let Some(language) = language {
1159 syntax_snapshot.reparse(&snapshot, registry, language);
1160 }
1161 }
1162 EditPreview {
1163 old_snapshot,
1164 applied_edits_snapshot: branch_buffer.snapshot(),
1165 syntax_snapshot,
1166 }
1167 })
1168 }
1169
1170 /// Applies all of the changes in this buffer that intersect any of the
1171 /// given `ranges` to its base buffer.
1172 ///
1173 /// If `ranges` is empty, then all changes will be applied. This buffer must
1174 /// be a branch buffer to call this method.
1175 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1176 let Some(base_buffer) = self.base_buffer() else {
1177 debug_panic!("not a branch buffer");
1178 return;
1179 };
1180
1181 let mut ranges = if ranges.is_empty() {
1182 &[0..usize::MAX]
1183 } else {
1184 ranges.as_slice()
1185 }
1186 .iter()
1187 .peekable();
1188
1189 let mut edits = Vec::new();
1190 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1191 let mut is_included = false;
1192 while let Some(range) = ranges.peek() {
1193 if range.end < edit.new.start {
1194 ranges.next().unwrap();
1195 } else {
1196 if range.start <= edit.new.end {
1197 is_included = true;
1198 }
1199 break;
1200 }
1201 }
1202
1203 if is_included {
1204 edits.push((
1205 edit.old.clone(),
1206 self.text_for_range(edit.new.clone()).collect::<String>(),
1207 ));
1208 }
1209 }
1210
1211 let operation = base_buffer.update(cx, |base_buffer, cx| {
1212 // cx.emit(BufferEvent::DiffBaseChanged);
1213 base_buffer.edit(edits, None, cx)
1214 });
1215
1216 if let Some(operation) = operation
1217 && let Some(BufferBranchState {
1218 merged_operations, ..
1219 }) = &mut self.branch_state
1220 {
1221 merged_operations.push(operation);
1222 }
1223 }
1224
1225 fn on_base_buffer_event(
1226 &mut self,
1227 _: Entity<Buffer>,
1228 event: &BufferEvent,
1229 cx: &mut Context<Self>,
1230 ) {
1231 let BufferEvent::Operation { operation, .. } = event else {
1232 return;
1233 };
1234 let Some(BufferBranchState {
1235 merged_operations, ..
1236 }) = &mut self.branch_state
1237 else {
1238 return;
1239 };
1240
1241 let mut operation_to_undo = None;
1242 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1243 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1244 {
1245 merged_operations.remove(ix);
1246 operation_to_undo = Some(operation.timestamp);
1247 }
1248
1249 self.apply_ops([operation.clone()], cx);
1250
1251 if let Some(timestamp) = operation_to_undo {
1252 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1253 self.undo_operations(counts, cx);
1254 }
1255 }
1256
1257 #[cfg(test)]
1258 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1259 &self.text
1260 }
1261
1262 /// Retrieve a snapshot of the buffer's raw text, without any
1263 /// language-related state like the syntax tree or diagnostics.
1264 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1265 self.text.snapshot()
1266 }
1267
1268 /// The file associated with the buffer, if any.
1269 pub fn file(&self) -> Option<&Arc<dyn File>> {
1270 self.file.as_ref()
1271 }
1272
1273 /// The version of the buffer that was last saved or reloaded from disk.
1274 pub fn saved_version(&self) -> &clock::Global {
1275 &self.saved_version
1276 }
1277
1278 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1279 pub fn saved_mtime(&self) -> Option<MTime> {
1280 self.saved_mtime
1281 }
1282
1283 /// Assign a language to the buffer.
1284 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1285 self.non_text_state_update_count += 1;
1286 self.syntax_map.lock().clear(&self.text);
1287 self.language = language;
1288 self.was_changed();
1289 self.reparse(cx);
1290 cx.emit(BufferEvent::LanguageChanged);
1291 }
1292
1293 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1294 /// other languages if parts of the buffer are written in different languages.
1295 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1296 self.syntax_map
1297 .lock()
1298 .set_language_registry(language_registry);
1299 }
1300
1301 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1302 self.syntax_map.lock().language_registry()
1303 }
1304
1305 /// Assign the line ending type to the buffer.
1306 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1307 self.text.set_line_ending(line_ending);
1308
1309 let lamport_timestamp = self.text.lamport_clock.tick();
1310 self.send_operation(
1311 Operation::UpdateLineEnding {
1312 line_ending,
1313 lamport_timestamp,
1314 },
1315 true,
1316 cx,
1317 );
1318 }
1319
1320 /// Assign the buffer a new [`Capability`].
1321 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1322 if self.capability != capability {
1323 self.capability = capability;
1324 cx.emit(BufferEvent::CapabilityChanged)
1325 }
1326 }
1327
1328 /// This method is called to signal that the buffer has been saved.
1329 pub fn did_save(
1330 &mut self,
1331 version: clock::Global,
1332 mtime: Option<MTime>,
1333 cx: &mut Context<Self>,
1334 ) {
1335 self.saved_version = version.clone();
1336 self.has_unsaved_edits.set((version, false));
1337 self.has_conflict = false;
1338 self.saved_mtime = mtime;
1339 self.was_changed();
1340 cx.emit(BufferEvent::Saved);
1341 cx.notify();
1342 }
1343
1344 /// Reloads the contents of the buffer from disk.
1345 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1346 let (tx, rx) = futures::channel::oneshot::channel();
1347
1348 let prev_version = self.text.version();
1349 self.reload_task = Some(cx.spawn(async move |this, cx| {
1350 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1351 let file = this.file.as_ref()?.as_local()?;
1352 Some((file.disk_state().mtime(), { file.load(cx) }))
1353 })?
1354 else {
1355 return Ok(());
1356 };
1357
1358 let new_text = new_text.await?;
1359 let diff = this
1360 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1361 .await;
1362 this.update(cx, |this, cx| {
1363 if this.version() == diff.base_version {
1364 this.finalize_last_transaction();
1365 this.apply_diff(diff, cx);
1366 tx.send(this.finalize_last_transaction().cloned()).ok();
1367 this.has_conflict = false;
1368 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1369 } else {
1370 if !diff.edits.is_empty()
1371 || this
1372 .edits_since::<usize>(&diff.base_version)
1373 .next()
1374 .is_some()
1375 {
1376 this.has_conflict = true;
1377 }
1378
1379 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1380 }
1381
1382 this.reload_task.take();
1383 })
1384 }));
1385 rx
1386 }
1387
1388 /// This method is called to signal that the buffer has been reloaded.
1389 pub fn did_reload(
1390 &mut self,
1391 version: clock::Global,
1392 line_ending: LineEnding,
1393 mtime: Option<MTime>,
1394 cx: &mut Context<Self>,
1395 ) {
1396 self.saved_version = version;
1397 self.has_unsaved_edits
1398 .set((self.saved_version.clone(), false));
1399 self.text.set_line_ending(line_ending);
1400 self.saved_mtime = mtime;
1401 cx.emit(BufferEvent::Reloaded);
1402 cx.notify();
1403 }
1404
1405 /// Updates the [`File`] backing this buffer. This should be called when
1406 /// the file has changed or has been deleted.
1407 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1408 let was_dirty = self.is_dirty();
1409 let mut file_changed = false;
1410
1411 if let Some(old_file) = self.file.as_ref() {
1412 if new_file.path() != old_file.path() {
1413 file_changed = true;
1414 }
1415
1416 let old_state = old_file.disk_state();
1417 let new_state = new_file.disk_state();
1418 if old_state != new_state {
1419 file_changed = true;
1420 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1421 cx.emit(BufferEvent::ReloadNeeded)
1422 }
1423 }
1424 } else {
1425 file_changed = true;
1426 };
1427
1428 self.file = Some(new_file);
1429 if file_changed {
1430 self.was_changed();
1431 self.non_text_state_update_count += 1;
1432 if was_dirty != self.is_dirty() {
1433 cx.emit(BufferEvent::DirtyChanged);
1434 }
1435 cx.emit(BufferEvent::FileHandleChanged);
1436 cx.notify();
1437 }
1438 }
1439
1440 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1441 Some(self.branch_state.as_ref()?.base_buffer.clone())
1442 }
1443
1444 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1445 pub fn language(&self) -> Option<&Arc<Language>> {
1446 self.language.as_ref()
1447 }
1448
1449 /// Returns the [`Language`] at the given location.
1450 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1451 let offset = position.to_offset(self);
1452 let mut is_first = true;
1453 let start_anchor = self.anchor_before(offset);
1454 let end_anchor = self.anchor_after(offset);
1455 self.syntax_map
1456 .lock()
1457 .layers_for_range(offset..offset, &self.text, false)
1458 .filter(|layer| {
1459 if is_first {
1460 is_first = false;
1461 return true;
1462 }
1463
1464 layer
1465 .included_sub_ranges
1466 .map(|sub_ranges| {
1467 sub_ranges.iter().any(|sub_range| {
1468 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1469 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1470 !is_before_start && !is_after_end
1471 })
1472 })
1473 .unwrap_or(true)
1474 })
1475 .last()
1476 .map(|info| info.language.clone())
1477 .or_else(|| self.language.clone())
1478 }
1479
1480 /// Returns each [`Language`] for the active syntax layers at the given location.
1481 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1482 let offset = position.to_offset(self);
1483 let mut languages: Vec<Arc<Language>> = self
1484 .syntax_map
1485 .lock()
1486 .layers_for_range(offset..offset, &self.text, false)
1487 .map(|info| info.language.clone())
1488 .collect();
1489
1490 if languages.is_empty()
1491 && let Some(buffer_language) = self.language()
1492 {
1493 languages.push(buffer_language.clone());
1494 }
1495
1496 languages
1497 }
1498
1499 /// An integer version number that accounts for all updates besides
1500 /// the buffer's text itself (which is versioned via a version vector).
1501 pub fn non_text_state_update_count(&self) -> usize {
1502 self.non_text_state_update_count
1503 }
1504
1505 /// Whether the buffer is being parsed in the background.
1506 #[cfg(any(test, feature = "test-support"))]
1507 pub fn is_parsing(&self) -> bool {
1508 self.reparse.is_some()
1509 }
1510
1511 /// Indicates whether the buffer contains any regions that may be
1512 /// written in a language that hasn't been loaded yet.
1513 pub fn contains_unknown_injections(&self) -> bool {
1514 self.syntax_map.lock().contains_unknown_injections()
1515 }
1516
1517 #[cfg(any(test, feature = "test-support"))]
1518 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1519 self.sync_parse_timeout = timeout;
1520 }
1521
1522 /// Called after an edit to synchronize the buffer's main parse tree with
1523 /// the buffer's new underlying state.
1524 ///
1525 /// Locks the syntax map and interpolates the edits since the last reparse
1526 /// into the foreground syntax tree.
1527 ///
1528 /// Then takes a stable snapshot of the syntax map before unlocking it.
1529 /// The snapshot with the interpolated edits is sent to a background thread,
1530 /// where we ask Tree-sitter to perform an incremental parse.
1531 ///
1532 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1533 /// waiting on the parse to complete. As soon as it completes, we proceed
1534 /// synchronously, unless a 1ms timeout elapses.
1535 ///
1536 /// If we time out waiting on the parse, we spawn a second task waiting
1537 /// until the parse does complete and return with the interpolated tree still
1538 /// in the foreground. When the background parse completes, call back into
1539 /// the main thread and assign the foreground parse state.
1540 ///
1541 /// If the buffer or grammar changed since the start of the background parse,
1542 /// initiate an additional reparse recursively. To avoid concurrent parses
1543 /// for the same buffer, we only initiate a new parse if we are not already
1544 /// parsing in the background.
1545 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1546 if self.reparse.is_some() {
1547 return;
1548 }
1549 let language = if let Some(language) = self.language.clone() {
1550 language
1551 } else {
1552 return;
1553 };
1554
1555 let text = self.text_snapshot();
1556 let parsed_version = self.version();
1557
1558 let mut syntax_map = self.syntax_map.lock();
1559 syntax_map.interpolate(&text);
1560 let language_registry = syntax_map.language_registry();
1561 let mut syntax_snapshot = syntax_map.snapshot();
1562 drop(syntax_map);
1563
1564 let parse_task = cx.background_spawn({
1565 let language = language.clone();
1566 let language_registry = language_registry.clone();
1567 async move {
1568 syntax_snapshot.reparse(&text, language_registry, language);
1569 syntax_snapshot
1570 }
1571 });
1572
1573 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1574 match cx
1575 .background_executor()
1576 .block_with_timeout(self.sync_parse_timeout, parse_task)
1577 {
1578 Ok(new_syntax_snapshot) => {
1579 self.did_finish_parsing(new_syntax_snapshot, cx);
1580 self.reparse = None;
1581 }
1582 Err(parse_task) => {
1583 // todo(lw): hot foreground spawn
1584 self.reparse = Some(cx.spawn(async move |this, cx| {
1585 let new_syntax_map = cx.background_spawn(parse_task).await;
1586 this.update(cx, move |this, cx| {
1587 let grammar_changed = || {
1588 this.language.as_ref().is_none_or(|current_language| {
1589 !Arc::ptr_eq(&language, current_language)
1590 })
1591 };
1592 let language_registry_changed = || {
1593 new_syntax_map.contains_unknown_injections()
1594 && language_registry.is_some_and(|registry| {
1595 registry.version() != new_syntax_map.language_registry_version()
1596 })
1597 };
1598 let parse_again = this.version.changed_since(&parsed_version)
1599 || language_registry_changed()
1600 || grammar_changed();
1601 this.did_finish_parsing(new_syntax_map, cx);
1602 this.reparse = None;
1603 if parse_again {
1604 this.reparse(cx);
1605 }
1606 })
1607 .ok();
1608 }));
1609 }
1610 }
1611 }
1612
1613 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1614 self.was_changed();
1615 self.non_text_state_update_count += 1;
1616 self.syntax_map.lock().did_parse(syntax_snapshot);
1617 self.request_autoindent(cx);
1618 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1619 cx.emit(BufferEvent::Reparsed);
1620 cx.notify();
1621 }
1622
1623 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1624 self.parse_status.1.clone()
1625 }
1626
1627 /// Assign to the buffer a set of diagnostics created by a given language server.
1628 pub fn update_diagnostics(
1629 &mut self,
1630 server_id: LanguageServerId,
1631 diagnostics: DiagnosticSet,
1632 cx: &mut Context<Self>,
1633 ) {
1634 let lamport_timestamp = self.text.lamport_clock.tick();
1635 let op = Operation::UpdateDiagnostics {
1636 server_id,
1637 diagnostics: diagnostics.iter().cloned().collect(),
1638 lamport_timestamp,
1639 };
1640
1641 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1642 self.send_operation(op, true, cx);
1643 }
1644
1645 pub fn buffer_diagnostics(
1646 &self,
1647 for_server: Option<LanguageServerId>,
1648 ) -> Vec<&DiagnosticEntry<Anchor>> {
1649 match for_server {
1650 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1651 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1652 Err(_) => Vec::new(),
1653 },
1654 None => self
1655 .diagnostics
1656 .iter()
1657 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1658 .collect(),
1659 }
1660 }
1661
1662 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1663 if let Some(indent_sizes) = self.compute_autoindents() {
1664 let indent_sizes = cx.background_spawn(indent_sizes);
1665 match cx
1666 .background_executor()
1667 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1668 {
1669 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1670 Err(indent_sizes) => {
1671 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1672 let indent_sizes = indent_sizes.await;
1673 this.update(cx, |this, cx| {
1674 this.apply_autoindents(indent_sizes, cx);
1675 })
1676 .ok();
1677 }));
1678 }
1679 }
1680 } else {
1681 self.autoindent_requests.clear();
1682 for tx in self.wait_for_autoindent_txs.drain(..) {
1683 tx.send(()).ok();
1684 }
1685 }
1686 }
1687
1688 fn compute_autoindents(
1689 &self,
1690 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1691 let max_rows_between_yields = 100;
1692 let snapshot = self.snapshot();
1693 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1694 return None;
1695 }
1696
1697 let autoindent_requests = self.autoindent_requests.clone();
1698 Some(async move {
1699 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1700 for request in autoindent_requests {
1701 // Resolve each edited range to its row in the current buffer and in the
1702 // buffer before this batch of edits.
1703 let mut row_ranges = Vec::new();
1704 let mut old_to_new_rows = BTreeMap::new();
1705 let mut language_indent_sizes_by_new_row = Vec::new();
1706 for entry in &request.entries {
1707 let position = entry.range.start;
1708 let new_row = position.to_point(&snapshot).row;
1709 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1710 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1711
1712 if !entry.first_line_is_new {
1713 let old_row = position.to_point(&request.before_edit).row;
1714 old_to_new_rows.insert(old_row, new_row);
1715 }
1716 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1717 }
1718
1719 // Build a map containing the suggested indentation for each of the edited lines
1720 // with respect to the state of the buffer before these edits. This map is keyed
1721 // by the rows for these lines in the current state of the buffer.
1722 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1723 let old_edited_ranges =
1724 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1725 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1726 let mut language_indent_size = IndentSize::default();
1727 for old_edited_range in old_edited_ranges {
1728 let suggestions = request
1729 .before_edit
1730 .suggest_autoindents(old_edited_range.clone())
1731 .into_iter()
1732 .flatten();
1733 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1734 if let Some(suggestion) = suggestion {
1735 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1736
1737 // Find the indent size based on the language for this row.
1738 while let Some((row, size)) = language_indent_sizes.peek() {
1739 if *row > new_row {
1740 break;
1741 }
1742 language_indent_size = *size;
1743 language_indent_sizes.next();
1744 }
1745
1746 let suggested_indent = old_to_new_rows
1747 .get(&suggestion.basis_row)
1748 .and_then(|from_row| {
1749 Some(old_suggestions.get(from_row).copied()?.0)
1750 })
1751 .unwrap_or_else(|| {
1752 request
1753 .before_edit
1754 .indent_size_for_line(suggestion.basis_row)
1755 })
1756 .with_delta(suggestion.delta, language_indent_size);
1757 old_suggestions
1758 .insert(new_row, (suggested_indent, suggestion.within_error));
1759 }
1760 }
1761 yield_now().await;
1762 }
1763
1764 // Compute new suggestions for each line, but only include them in the result
1765 // if they differ from the old suggestion for that line.
1766 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1767 let mut language_indent_size = IndentSize::default();
1768 for (row_range, original_indent_column) in row_ranges {
1769 let new_edited_row_range = if request.is_block_mode {
1770 row_range.start..row_range.start + 1
1771 } else {
1772 row_range.clone()
1773 };
1774
1775 let suggestions = snapshot
1776 .suggest_autoindents(new_edited_row_range.clone())
1777 .into_iter()
1778 .flatten();
1779 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1780 if let Some(suggestion) = suggestion {
1781 // Find the indent size based on the language for this row.
1782 while let Some((row, size)) = language_indent_sizes.peek() {
1783 if *row > new_row {
1784 break;
1785 }
1786 language_indent_size = *size;
1787 language_indent_sizes.next();
1788 }
1789
1790 let suggested_indent = indent_sizes
1791 .get(&suggestion.basis_row)
1792 .copied()
1793 .map(|e| e.0)
1794 .unwrap_or_else(|| {
1795 snapshot.indent_size_for_line(suggestion.basis_row)
1796 })
1797 .with_delta(suggestion.delta, language_indent_size);
1798
1799 if old_suggestions.get(&new_row).is_none_or(
1800 |(old_indentation, was_within_error)| {
1801 suggested_indent != *old_indentation
1802 && (!suggestion.within_error || *was_within_error)
1803 },
1804 ) {
1805 indent_sizes.insert(
1806 new_row,
1807 (suggested_indent, request.ignore_empty_lines),
1808 );
1809 }
1810 }
1811 }
1812
1813 if let (true, Some(original_indent_column)) =
1814 (request.is_block_mode, original_indent_column)
1815 {
1816 let new_indent =
1817 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1818 *indent
1819 } else {
1820 snapshot.indent_size_for_line(row_range.start)
1821 };
1822 let delta = new_indent.len as i64 - original_indent_column as i64;
1823 if delta != 0 {
1824 for row in row_range.skip(1) {
1825 indent_sizes.entry(row).or_insert_with(|| {
1826 let mut size = snapshot.indent_size_for_line(row);
1827 if size.kind == new_indent.kind {
1828 match delta.cmp(&0) {
1829 Ordering::Greater => size.len += delta as u32,
1830 Ordering::Less => {
1831 size.len = size.len.saturating_sub(-delta as u32)
1832 }
1833 Ordering::Equal => {}
1834 }
1835 }
1836 (size, request.ignore_empty_lines)
1837 });
1838 }
1839 }
1840 }
1841
1842 yield_now().await;
1843 }
1844 }
1845
1846 indent_sizes
1847 .into_iter()
1848 .filter_map(|(row, (indent, ignore_empty_lines))| {
1849 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1850 None
1851 } else {
1852 Some((row, indent))
1853 }
1854 })
1855 .collect()
1856 })
1857 }
1858
1859 fn apply_autoindents(
1860 &mut self,
1861 indent_sizes: BTreeMap<u32, IndentSize>,
1862 cx: &mut Context<Self>,
1863 ) {
1864 self.autoindent_requests.clear();
1865 for tx in self.wait_for_autoindent_txs.drain(..) {
1866 tx.send(()).ok();
1867 }
1868
1869 let edits: Vec<_> = indent_sizes
1870 .into_iter()
1871 .filter_map(|(row, indent_size)| {
1872 let current_size = indent_size_for_line(self, row);
1873 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1874 })
1875 .collect();
1876
1877 let preserve_preview = self.preserve_preview();
1878 self.edit(edits, None, cx);
1879 if preserve_preview {
1880 self.refresh_preview();
1881 }
1882 }
1883
1884 /// Create a minimal edit that will cause the given row to be indented
1885 /// with the given size. After applying this edit, the length of the line
1886 /// will always be at least `new_size.len`.
1887 pub fn edit_for_indent_size_adjustment(
1888 row: u32,
1889 current_size: IndentSize,
1890 new_size: IndentSize,
1891 ) -> Option<(Range<Point>, String)> {
1892 if new_size.kind == current_size.kind {
1893 match new_size.len.cmp(¤t_size.len) {
1894 Ordering::Greater => {
1895 let point = Point::new(row, 0);
1896 Some((
1897 point..point,
1898 iter::repeat(new_size.char())
1899 .take((new_size.len - current_size.len) as usize)
1900 .collect::<String>(),
1901 ))
1902 }
1903
1904 Ordering::Less => Some((
1905 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1906 String::new(),
1907 )),
1908
1909 Ordering::Equal => None,
1910 }
1911 } else {
1912 Some((
1913 Point::new(row, 0)..Point::new(row, current_size.len),
1914 iter::repeat(new_size.char())
1915 .take(new_size.len as usize)
1916 .collect::<String>(),
1917 ))
1918 }
1919 }
1920
1921 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1922 /// and the given new text.
1923 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1924 let old_text = self.as_rope().clone();
1925 let base_version = self.version();
1926 cx.background_executor()
1927 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1928 let old_text = old_text.to_string();
1929 let line_ending = LineEnding::detect(&new_text);
1930 LineEnding::normalize(&mut new_text);
1931 let edits = text_diff(&old_text, &new_text);
1932 Diff {
1933 base_version,
1934 line_ending,
1935 edits,
1936 }
1937 })
1938 }
1939
1940 /// Spawns a background task that searches the buffer for any whitespace
1941 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1942 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1943 let old_text = self.as_rope().clone();
1944 let line_ending = self.line_ending();
1945 let base_version = self.version();
1946 cx.background_spawn(async move {
1947 let ranges = trailing_whitespace_ranges(&old_text);
1948 let empty = Arc::<str>::from("");
1949 Diff {
1950 base_version,
1951 line_ending,
1952 edits: ranges
1953 .into_iter()
1954 .map(|range| (range, empty.clone()))
1955 .collect(),
1956 }
1957 })
1958 }
1959
1960 /// Ensures that the buffer ends with a single newline character, and
1961 /// no other whitespace. Skips if the buffer is empty.
1962 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1963 let len = self.len();
1964 if len == 0 {
1965 return;
1966 }
1967 let mut offset = len;
1968 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1969 let non_whitespace_len = chunk
1970 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1971 .len();
1972 offset -= chunk.len();
1973 offset += non_whitespace_len;
1974 if non_whitespace_len != 0 {
1975 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1976 return;
1977 }
1978 break;
1979 }
1980 }
1981 self.edit([(offset..len, "\n")], None, cx);
1982 }
1983
1984 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1985 /// calculated, then adjust the diff to account for those changes, and discard any
1986 /// parts of the diff that conflict with those changes.
1987 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1988 let snapshot = self.snapshot();
1989 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1990 let mut delta = 0;
1991 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1992 while let Some(edit_since) = edits_since.peek() {
1993 // If the edit occurs after a diff hunk, then it does not
1994 // affect that hunk.
1995 if edit_since.old.start > range.end {
1996 break;
1997 }
1998 // If the edit precedes the diff hunk, then adjust the hunk
1999 // to reflect the edit.
2000 else if edit_since.old.end < range.start {
2001 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2002 edits_since.next();
2003 }
2004 // If the edit intersects a diff hunk, then discard that hunk.
2005 else {
2006 return None;
2007 }
2008 }
2009
2010 let start = (range.start as i64 + delta) as usize;
2011 let end = (range.end as i64 + delta) as usize;
2012 Some((start..end, new_text))
2013 });
2014
2015 self.start_transaction();
2016 self.text.set_line_ending(diff.line_ending);
2017 self.edit(adjusted_edits, None, cx);
2018 self.end_transaction(cx)
2019 }
2020
2021 pub fn has_unsaved_edits(&self) -> bool {
2022 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2023
2024 if last_version == self.version {
2025 self.has_unsaved_edits
2026 .set((last_version, has_unsaved_edits));
2027 return has_unsaved_edits;
2028 }
2029
2030 let has_edits = self.has_edits_since(&self.saved_version);
2031 self.has_unsaved_edits
2032 .set((self.version.clone(), has_edits));
2033 has_edits
2034 }
2035
2036 /// Checks if the buffer has unsaved changes.
2037 pub fn is_dirty(&self) -> bool {
2038 if self.capability == Capability::ReadOnly {
2039 return false;
2040 }
2041 if self.has_conflict {
2042 return true;
2043 }
2044 match self.file.as_ref().map(|f| f.disk_state()) {
2045 Some(DiskState::New) | Some(DiskState::Deleted) => {
2046 !self.is_empty() && self.has_unsaved_edits()
2047 }
2048 _ => self.has_unsaved_edits(),
2049 }
2050 }
2051
2052 /// Checks if the buffer and its file have both changed since the buffer
2053 /// was last saved or reloaded.
2054 pub fn has_conflict(&self) -> bool {
2055 if self.has_conflict {
2056 return true;
2057 }
2058 let Some(file) = self.file.as_ref() else {
2059 return false;
2060 };
2061 match file.disk_state() {
2062 DiskState::New => false,
2063 DiskState::Present { mtime } => match self.saved_mtime {
2064 Some(saved_mtime) => {
2065 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2066 }
2067 None => true,
2068 },
2069 DiskState::Deleted => false,
2070 }
2071 }
2072
2073 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2074 pub fn subscribe(&mut self) -> Subscription {
2075 self.text.subscribe()
2076 }
2077
2078 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2079 ///
2080 /// This allows downstream code to check if the buffer's text has changed without
2081 /// waiting for an effect cycle, which would be required if using eents.
2082 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2083 if let Err(ix) = self
2084 .change_bits
2085 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2086 {
2087 self.change_bits.insert(ix, bit);
2088 }
2089 }
2090
2091 /// Set the change bit for all "listeners".
2092 fn was_changed(&mut self) {
2093 self.change_bits.retain(|change_bit| {
2094 change_bit
2095 .upgrade()
2096 .inspect(|bit| {
2097 _ = bit.replace(true);
2098 })
2099 .is_some()
2100 });
2101 }
2102
2103 /// Starts a transaction, if one is not already in-progress. When undoing or
2104 /// redoing edits, all of the edits performed within a transaction are undone
2105 /// or redone together.
2106 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2107 self.start_transaction_at(Instant::now())
2108 }
2109
2110 /// Starts a transaction, providing the current time. Subsequent transactions
2111 /// that occur within a short period of time will be grouped together. This
2112 /// is controlled by the buffer's undo grouping duration.
2113 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2114 self.transaction_depth += 1;
2115 if self.was_dirty_before_starting_transaction.is_none() {
2116 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2117 }
2118 self.text.start_transaction_at(now)
2119 }
2120
2121 /// Terminates the current transaction, if this is the outermost transaction.
2122 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2123 self.end_transaction_at(Instant::now(), cx)
2124 }
2125
2126 /// Terminates the current transaction, providing the current time. Subsequent transactions
2127 /// that occur within a short period of time will be grouped together. This
2128 /// is controlled by the buffer's undo grouping duration.
2129 pub fn end_transaction_at(
2130 &mut self,
2131 now: Instant,
2132 cx: &mut Context<Self>,
2133 ) -> Option<TransactionId> {
2134 assert!(self.transaction_depth > 0);
2135 self.transaction_depth -= 1;
2136 let was_dirty = if self.transaction_depth == 0 {
2137 self.was_dirty_before_starting_transaction.take().unwrap()
2138 } else {
2139 false
2140 };
2141 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2142 self.did_edit(&start_version, was_dirty, cx);
2143 Some(transaction_id)
2144 } else {
2145 None
2146 }
2147 }
2148
2149 /// Manually add a transaction to the buffer's undo history.
2150 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2151 self.text.push_transaction(transaction, now);
2152 }
2153
2154 /// Differs from `push_transaction` in that it does not clear the redo
2155 /// stack. Intended to be used to create a parent transaction to merge
2156 /// potential child transactions into.
2157 ///
2158 /// The caller is responsible for removing it from the undo history using
2159 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2160 /// are merged into this transaction, the caller is responsible for ensuring
2161 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2162 /// cleared is to create transactions with the usual `start_transaction` and
2163 /// `end_transaction` methods and merging the resulting transactions into
2164 /// the transaction created by this method
2165 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2166 self.text.push_empty_transaction(now)
2167 }
2168
2169 /// Prevent the last transaction from being grouped with any subsequent transactions,
2170 /// even if they occur with the buffer's undo grouping duration.
2171 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2172 self.text.finalize_last_transaction()
2173 }
2174
2175 /// Manually group all changes since a given transaction.
2176 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2177 self.text.group_until_transaction(transaction_id);
2178 }
2179
2180 /// Manually remove a transaction from the buffer's undo history
2181 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2182 self.text.forget_transaction(transaction_id)
2183 }
2184
2185 /// Retrieve a transaction from the buffer's undo history
2186 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2187 self.text.get_transaction(transaction_id)
2188 }
2189
2190 /// Manually merge two transactions in the buffer's undo history.
2191 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2192 self.text.merge_transactions(transaction, destination);
2193 }
2194
2195 /// Waits for the buffer to receive operations with the given timestamps.
2196 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2197 &mut self,
2198 edit_ids: It,
2199 ) -> impl Future<Output = Result<()>> + use<It> {
2200 self.text.wait_for_edits(edit_ids)
2201 }
2202
2203 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2204 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2205 &mut self,
2206 anchors: It,
2207 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2208 self.text.wait_for_anchors(anchors)
2209 }
2210
2211 /// Waits for the buffer to receive operations up to the given version.
2212 pub fn wait_for_version(
2213 &mut self,
2214 version: clock::Global,
2215 ) -> impl Future<Output = Result<()>> + use<> {
2216 self.text.wait_for_version(version)
2217 }
2218
2219 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2220 /// [`Buffer::wait_for_version`] to resolve with an error.
2221 pub fn give_up_waiting(&mut self) {
2222 self.text.give_up_waiting();
2223 }
2224
2225 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2226 let mut rx = None;
2227 if !self.autoindent_requests.is_empty() {
2228 let channel = oneshot::channel();
2229 self.wait_for_autoindent_txs.push(channel.0);
2230 rx = Some(channel.1);
2231 }
2232 rx
2233 }
2234
2235 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2236 pub fn set_active_selections(
2237 &mut self,
2238 selections: Arc<[Selection<Anchor>]>,
2239 line_mode: bool,
2240 cursor_shape: CursorShape,
2241 cx: &mut Context<Self>,
2242 ) {
2243 let lamport_timestamp = self.text.lamport_clock.tick();
2244 self.remote_selections.insert(
2245 self.text.replica_id(),
2246 SelectionSet {
2247 selections: selections.clone(),
2248 lamport_timestamp,
2249 line_mode,
2250 cursor_shape,
2251 },
2252 );
2253 self.send_operation(
2254 Operation::UpdateSelections {
2255 selections,
2256 line_mode,
2257 lamport_timestamp,
2258 cursor_shape,
2259 },
2260 true,
2261 cx,
2262 );
2263 self.non_text_state_update_count += 1;
2264 cx.notify();
2265 }
2266
2267 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2268 /// this replica.
2269 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2270 if self
2271 .remote_selections
2272 .get(&self.text.replica_id())
2273 .is_none_or(|set| !set.selections.is_empty())
2274 {
2275 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2276 }
2277 }
2278
2279 pub fn set_agent_selections(
2280 &mut self,
2281 selections: Arc<[Selection<Anchor>]>,
2282 line_mode: bool,
2283 cursor_shape: CursorShape,
2284 cx: &mut Context<Self>,
2285 ) {
2286 let lamport_timestamp = self.text.lamport_clock.tick();
2287 self.remote_selections.insert(
2288 ReplicaId::AGENT,
2289 SelectionSet {
2290 selections,
2291 lamport_timestamp,
2292 line_mode,
2293 cursor_shape,
2294 },
2295 );
2296 self.non_text_state_update_count += 1;
2297 cx.notify();
2298 }
2299
2300 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2301 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2302 }
2303
2304 /// Replaces the buffer's entire text.
2305 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2306 where
2307 T: Into<Arc<str>>,
2308 {
2309 self.autoindent_requests.clear();
2310 self.edit([(0..self.len(), text)], None, cx)
2311 }
2312
2313 /// Appends the given text to the end of the buffer.
2314 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2315 where
2316 T: Into<Arc<str>>,
2317 {
2318 self.edit([(self.len()..self.len(), text)], None, cx)
2319 }
2320
2321 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2322 /// delete, and a string of text to insert at that location.
2323 ///
2324 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2325 /// request for the edited ranges, which will be processed when the buffer finishes
2326 /// parsing.
2327 ///
2328 /// Parsing takes place at the end of a transaction, and may compute synchronously
2329 /// or asynchronously, depending on the changes.
2330 pub fn edit<I, S, T>(
2331 &mut self,
2332 edits_iter: I,
2333 autoindent_mode: Option<AutoindentMode>,
2334 cx: &mut Context<Self>,
2335 ) -> Option<clock::Lamport>
2336 where
2337 I: IntoIterator<Item = (Range<S>, T)>,
2338 S: ToOffset,
2339 T: Into<Arc<str>>,
2340 {
2341 // Skip invalid edits and coalesce contiguous ones.
2342 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2343
2344 for (range, new_text) in edits_iter {
2345 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2346
2347 if range.start > range.end {
2348 mem::swap(&mut range.start, &mut range.end);
2349 }
2350 let new_text = new_text.into();
2351 if !new_text.is_empty() || !range.is_empty() {
2352 if let Some((prev_range, prev_text)) = edits.last_mut()
2353 && prev_range.end >= range.start
2354 {
2355 prev_range.end = cmp::max(prev_range.end, range.end);
2356 *prev_text = format!("{prev_text}{new_text}").into();
2357 } else {
2358 edits.push((range, new_text));
2359 }
2360 }
2361 }
2362 if edits.is_empty() {
2363 return None;
2364 }
2365
2366 self.start_transaction();
2367 self.pending_autoindent.take();
2368 let autoindent_request = autoindent_mode
2369 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2370
2371 let edit_operation = self
2372 .text
2373 .edit(edits.iter().cloned(), cx.background_executor());
2374 let edit_id = edit_operation.timestamp();
2375
2376 if let Some((before_edit, mode)) = autoindent_request {
2377 let mut delta = 0isize;
2378 let mut previous_setting = None;
2379 let entries: Vec<_> = edits
2380 .into_iter()
2381 .enumerate()
2382 .zip(&edit_operation.as_edit().unwrap().new_text)
2383 .filter(|((_, (range, _)), _)| {
2384 let language = before_edit.language_at(range.start);
2385 let language_id = language.map(|l| l.id());
2386 if let Some((cached_language_id, auto_indent)) = previous_setting
2387 && cached_language_id == language_id
2388 {
2389 auto_indent
2390 } else {
2391 // The auto-indent setting is not present in editorconfigs, hence
2392 // we can avoid passing the file here.
2393 let auto_indent =
2394 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2395 previous_setting = Some((language_id, auto_indent));
2396 auto_indent
2397 }
2398 })
2399 .map(|((ix, (range, _)), new_text)| {
2400 let new_text_length = new_text.len();
2401 let old_start = range.start.to_point(&before_edit);
2402 let new_start = (delta + range.start as isize) as usize;
2403 let range_len = range.end - range.start;
2404 delta += new_text_length as isize - range_len as isize;
2405
2406 // Decide what range of the insertion to auto-indent, and whether
2407 // the first line of the insertion should be considered a newly-inserted line
2408 // or an edit to an existing line.
2409 let mut range_of_insertion_to_indent = 0..new_text_length;
2410 let mut first_line_is_new = true;
2411
2412 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2413 let old_line_end = before_edit.line_len(old_start.row);
2414
2415 if old_start.column > old_line_start {
2416 first_line_is_new = false;
2417 }
2418
2419 if !new_text.contains('\n')
2420 && (old_start.column + (range_len as u32) < old_line_end
2421 || old_line_end == old_line_start)
2422 {
2423 first_line_is_new = false;
2424 }
2425
2426 // When inserting text starting with a newline, avoid auto-indenting the
2427 // previous line.
2428 if new_text.starts_with('\n') {
2429 range_of_insertion_to_indent.start += 1;
2430 first_line_is_new = true;
2431 }
2432
2433 let mut original_indent_column = None;
2434 if let AutoindentMode::Block {
2435 original_indent_columns,
2436 } = &mode
2437 {
2438 original_indent_column = Some(if new_text.starts_with('\n') {
2439 indent_size_for_text(
2440 new_text[range_of_insertion_to_indent.clone()].chars(),
2441 )
2442 .len
2443 } else {
2444 original_indent_columns
2445 .get(ix)
2446 .copied()
2447 .flatten()
2448 .unwrap_or_else(|| {
2449 indent_size_for_text(
2450 new_text[range_of_insertion_to_indent.clone()].chars(),
2451 )
2452 .len
2453 })
2454 });
2455
2456 // Avoid auto-indenting the line after the edit.
2457 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2458 range_of_insertion_to_indent.end -= 1;
2459 }
2460 }
2461
2462 AutoindentRequestEntry {
2463 first_line_is_new,
2464 original_indent_column,
2465 indent_size: before_edit.language_indent_size_at(range.start, cx),
2466 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2467 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2468 }
2469 })
2470 .collect();
2471
2472 if !entries.is_empty() {
2473 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2474 before_edit,
2475 entries,
2476 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2477 ignore_empty_lines: false,
2478 }));
2479 }
2480 }
2481
2482 self.end_transaction(cx);
2483 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2484 Some(edit_id)
2485 }
2486
2487 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2488 self.was_changed();
2489
2490 if self.edits_since::<usize>(old_version).next().is_none() {
2491 return;
2492 }
2493
2494 self.reparse(cx);
2495 cx.emit(BufferEvent::Edited);
2496 if was_dirty != self.is_dirty() {
2497 cx.emit(BufferEvent::DirtyChanged);
2498 }
2499 cx.notify();
2500 }
2501
2502 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2503 where
2504 I: IntoIterator<Item = Range<T>>,
2505 T: ToOffset + Copy,
2506 {
2507 let before_edit = self.snapshot();
2508 let entries = ranges
2509 .into_iter()
2510 .map(|range| AutoindentRequestEntry {
2511 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2512 first_line_is_new: true,
2513 indent_size: before_edit.language_indent_size_at(range.start, cx),
2514 original_indent_column: None,
2515 })
2516 .collect();
2517 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2518 before_edit,
2519 entries,
2520 is_block_mode: false,
2521 ignore_empty_lines: true,
2522 }));
2523 self.request_autoindent(cx);
2524 }
2525
2526 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2527 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2528 pub fn insert_empty_line(
2529 &mut self,
2530 position: impl ToPoint,
2531 space_above: bool,
2532 space_below: bool,
2533 cx: &mut Context<Self>,
2534 ) -> Point {
2535 let mut position = position.to_point(self);
2536
2537 self.start_transaction();
2538
2539 self.edit(
2540 [(position..position, "\n")],
2541 Some(AutoindentMode::EachLine),
2542 cx,
2543 );
2544
2545 if position.column > 0 {
2546 position += Point::new(1, 0);
2547 }
2548
2549 if !self.is_line_blank(position.row) {
2550 self.edit(
2551 [(position..position, "\n")],
2552 Some(AutoindentMode::EachLine),
2553 cx,
2554 );
2555 }
2556
2557 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2558 self.edit(
2559 [(position..position, "\n")],
2560 Some(AutoindentMode::EachLine),
2561 cx,
2562 );
2563 position.row += 1;
2564 }
2565
2566 if space_below
2567 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2568 {
2569 self.edit(
2570 [(position..position, "\n")],
2571 Some(AutoindentMode::EachLine),
2572 cx,
2573 );
2574 }
2575
2576 self.end_transaction(cx);
2577
2578 position
2579 }
2580
2581 /// Applies the given remote operations to the buffer.
2582 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2583 self.pending_autoindent.take();
2584 let was_dirty = self.is_dirty();
2585 let old_version = self.version.clone();
2586 let mut deferred_ops = Vec::new();
2587 let buffer_ops = ops
2588 .into_iter()
2589 .filter_map(|op| match op {
2590 Operation::Buffer(op) => Some(op),
2591 _ => {
2592 if self.can_apply_op(&op) {
2593 self.apply_op(op, cx);
2594 } else {
2595 deferred_ops.push(op);
2596 }
2597 None
2598 }
2599 })
2600 .collect::<Vec<_>>();
2601 for operation in buffer_ops.iter() {
2602 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2603 }
2604 self.text
2605 .apply_ops(buffer_ops, Some(cx.background_executor()));
2606 self.deferred_ops.insert(deferred_ops);
2607 self.flush_deferred_ops(cx);
2608 self.did_edit(&old_version, was_dirty, cx);
2609 // Notify independently of whether the buffer was edited as the operations could include a
2610 // selection update.
2611 cx.notify();
2612 }
2613
2614 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2615 let mut deferred_ops = Vec::new();
2616 for op in self.deferred_ops.drain().iter().cloned() {
2617 if self.can_apply_op(&op) {
2618 self.apply_op(op, cx);
2619 } else {
2620 deferred_ops.push(op);
2621 }
2622 }
2623 self.deferred_ops.insert(deferred_ops);
2624 }
2625
2626 pub fn has_deferred_ops(&self) -> bool {
2627 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2628 }
2629
2630 fn can_apply_op(&self, operation: &Operation) -> bool {
2631 match operation {
2632 Operation::Buffer(_) => {
2633 unreachable!("buffer operations should never be applied at this layer")
2634 }
2635 Operation::UpdateDiagnostics {
2636 diagnostics: diagnostic_set,
2637 ..
2638 } => diagnostic_set.iter().all(|diagnostic| {
2639 self.text.can_resolve(&diagnostic.range.start)
2640 && self.text.can_resolve(&diagnostic.range.end)
2641 }),
2642 Operation::UpdateSelections { selections, .. } => selections
2643 .iter()
2644 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2645 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2646 }
2647 }
2648
2649 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2650 match operation {
2651 Operation::Buffer(_) => {
2652 unreachable!("buffer operations should never be applied at this layer")
2653 }
2654 Operation::UpdateDiagnostics {
2655 server_id,
2656 diagnostics: diagnostic_set,
2657 lamport_timestamp,
2658 } => {
2659 let snapshot = self.snapshot();
2660 self.apply_diagnostic_update(
2661 server_id,
2662 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2663 lamport_timestamp,
2664 cx,
2665 );
2666 }
2667 Operation::UpdateSelections {
2668 selections,
2669 lamport_timestamp,
2670 line_mode,
2671 cursor_shape,
2672 } => {
2673 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2674 && set.lamport_timestamp > lamport_timestamp
2675 {
2676 return;
2677 }
2678
2679 self.remote_selections.insert(
2680 lamport_timestamp.replica_id,
2681 SelectionSet {
2682 selections,
2683 lamport_timestamp,
2684 line_mode,
2685 cursor_shape,
2686 },
2687 );
2688 self.text.lamport_clock.observe(lamport_timestamp);
2689 self.non_text_state_update_count += 1;
2690 }
2691 Operation::UpdateCompletionTriggers {
2692 triggers,
2693 lamport_timestamp,
2694 server_id,
2695 } => {
2696 if triggers.is_empty() {
2697 self.completion_triggers_per_language_server
2698 .remove(&server_id);
2699 self.completion_triggers = self
2700 .completion_triggers_per_language_server
2701 .values()
2702 .flat_map(|triggers| triggers.iter().cloned())
2703 .collect();
2704 } else {
2705 self.completion_triggers_per_language_server
2706 .insert(server_id, triggers.iter().cloned().collect());
2707 self.completion_triggers.extend(triggers);
2708 }
2709 self.text.lamport_clock.observe(lamport_timestamp);
2710 }
2711 Operation::UpdateLineEnding {
2712 line_ending,
2713 lamport_timestamp,
2714 } => {
2715 self.text.set_line_ending(line_ending);
2716 self.text.lamport_clock.observe(lamport_timestamp);
2717 }
2718 }
2719 }
2720
2721 fn apply_diagnostic_update(
2722 &mut self,
2723 server_id: LanguageServerId,
2724 diagnostics: DiagnosticSet,
2725 lamport_timestamp: clock::Lamport,
2726 cx: &mut Context<Self>,
2727 ) {
2728 if lamport_timestamp > self.diagnostics_timestamp {
2729 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2730 if diagnostics.is_empty() {
2731 if let Ok(ix) = ix {
2732 self.diagnostics.remove(ix);
2733 }
2734 } else {
2735 match ix {
2736 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2737 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2738 };
2739 }
2740 self.diagnostics_timestamp = lamport_timestamp;
2741 self.non_text_state_update_count += 1;
2742 self.text.lamport_clock.observe(lamport_timestamp);
2743 cx.notify();
2744 cx.emit(BufferEvent::DiagnosticsUpdated);
2745 }
2746 }
2747
2748 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2749 self.was_changed();
2750 cx.emit(BufferEvent::Operation {
2751 operation,
2752 is_local,
2753 });
2754 }
2755
2756 /// Removes the selections for a given peer.
2757 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2758 self.remote_selections.remove(&replica_id);
2759 cx.notify();
2760 }
2761
2762 /// Undoes the most recent transaction.
2763 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2764 let was_dirty = self.is_dirty();
2765 let old_version = self.version.clone();
2766
2767 if let Some((transaction_id, operation)) = self.text.undo() {
2768 self.send_operation(Operation::Buffer(operation), true, cx);
2769 self.did_edit(&old_version, was_dirty, cx);
2770 Some(transaction_id)
2771 } else {
2772 None
2773 }
2774 }
2775
2776 /// Manually undoes a specific transaction in the buffer's undo history.
2777 pub fn undo_transaction(
2778 &mut self,
2779 transaction_id: TransactionId,
2780 cx: &mut Context<Self>,
2781 ) -> bool {
2782 let was_dirty = self.is_dirty();
2783 let old_version = self.version.clone();
2784 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2785 self.send_operation(Operation::Buffer(operation), true, cx);
2786 self.did_edit(&old_version, was_dirty, cx);
2787 true
2788 } else {
2789 false
2790 }
2791 }
2792
2793 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2794 pub fn undo_to_transaction(
2795 &mut self,
2796 transaction_id: TransactionId,
2797 cx: &mut Context<Self>,
2798 ) -> bool {
2799 let was_dirty = self.is_dirty();
2800 let old_version = self.version.clone();
2801
2802 let operations = self.text.undo_to_transaction(transaction_id);
2803 let undone = !operations.is_empty();
2804 for operation in operations {
2805 self.send_operation(Operation::Buffer(operation), true, cx);
2806 }
2807 if undone {
2808 self.did_edit(&old_version, was_dirty, cx)
2809 }
2810 undone
2811 }
2812
2813 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2814 let was_dirty = self.is_dirty();
2815 let operation = self.text.undo_operations(counts);
2816 let old_version = self.version.clone();
2817 self.send_operation(Operation::Buffer(operation), true, cx);
2818 self.did_edit(&old_version, was_dirty, cx);
2819 }
2820
2821 /// Manually redoes a specific transaction in the buffer's redo history.
2822 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2823 let was_dirty = self.is_dirty();
2824 let old_version = self.version.clone();
2825
2826 if let Some((transaction_id, operation)) = self.text.redo() {
2827 self.send_operation(Operation::Buffer(operation), true, cx);
2828 self.did_edit(&old_version, was_dirty, cx);
2829 Some(transaction_id)
2830 } else {
2831 None
2832 }
2833 }
2834
2835 /// Manually undoes all changes until a given transaction in the buffer's redo history.
2836 pub fn redo_to_transaction(
2837 &mut self,
2838 transaction_id: TransactionId,
2839 cx: &mut Context<Self>,
2840 ) -> bool {
2841 let was_dirty = self.is_dirty();
2842 let old_version = self.version.clone();
2843
2844 let operations = self.text.redo_to_transaction(transaction_id);
2845 let redone = !operations.is_empty();
2846 for operation in operations {
2847 self.send_operation(Operation::Buffer(operation), true, cx);
2848 }
2849 if redone {
2850 self.did_edit(&old_version, was_dirty, cx)
2851 }
2852 redone
2853 }
2854
2855 /// Override current completion triggers with the user-provided completion triggers.
2856 pub fn set_completion_triggers(
2857 &mut self,
2858 server_id: LanguageServerId,
2859 triggers: BTreeSet<String>,
2860 cx: &mut Context<Self>,
2861 ) {
2862 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2863 if triggers.is_empty() {
2864 self.completion_triggers_per_language_server
2865 .remove(&server_id);
2866 self.completion_triggers = self
2867 .completion_triggers_per_language_server
2868 .values()
2869 .flat_map(|triggers| triggers.iter().cloned())
2870 .collect();
2871 } else {
2872 self.completion_triggers_per_language_server
2873 .insert(server_id, triggers.clone());
2874 self.completion_triggers.extend(triggers.iter().cloned());
2875 }
2876 self.send_operation(
2877 Operation::UpdateCompletionTriggers {
2878 triggers: triggers.into_iter().collect(),
2879 lamport_timestamp: self.completion_triggers_timestamp,
2880 server_id,
2881 },
2882 true,
2883 cx,
2884 );
2885 cx.notify();
2886 }
2887
2888 /// Returns a list of strings which trigger a completion menu for this language.
2889 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2890 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2891 &self.completion_triggers
2892 }
2893
2894 /// Call this directly after performing edits to prevent the preview tab
2895 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2896 /// to return false until there are additional edits.
2897 pub fn refresh_preview(&mut self) {
2898 self.preview_version = self.version.clone();
2899 }
2900
2901 /// Whether we should preserve the preview status of a tab containing this buffer.
2902 pub fn preserve_preview(&self) -> bool {
2903 !self.has_edits_since(&self.preview_version)
2904 }
2905}
2906
2907#[doc(hidden)]
2908#[cfg(any(test, feature = "test-support"))]
2909impl Buffer {
2910 pub fn edit_via_marked_text(
2911 &mut self,
2912 marked_string: &str,
2913 autoindent_mode: Option<AutoindentMode>,
2914 cx: &mut Context<Self>,
2915 ) {
2916 let edits = self.edits_for_marked_text(marked_string);
2917 self.edit(edits, autoindent_mode, cx);
2918 }
2919
2920 pub fn set_group_interval(&mut self, group_interval: Duration) {
2921 self.text.set_group_interval(group_interval);
2922 }
2923
2924 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2925 where
2926 T: rand::Rng,
2927 {
2928 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2929 let mut last_end = None;
2930 for _ in 0..old_range_count {
2931 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2932 break;
2933 }
2934
2935 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2936 let mut range = self.random_byte_range(new_start, rng);
2937 if rng.random_bool(0.2) {
2938 mem::swap(&mut range.start, &mut range.end);
2939 }
2940 last_end = Some(range.end);
2941
2942 let new_text_len = rng.random_range(0..10);
2943 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2944 new_text = new_text.to_uppercase();
2945
2946 edits.push((range, new_text));
2947 }
2948 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2949 self.edit(edits, None, cx);
2950 }
2951
2952 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2953 let was_dirty = self.is_dirty();
2954 let old_version = self.version.clone();
2955
2956 let ops = self.text.randomly_undo_redo(rng);
2957 if !ops.is_empty() {
2958 for op in ops {
2959 self.send_operation(Operation::Buffer(op), true, cx);
2960 self.did_edit(&old_version, was_dirty, cx);
2961 }
2962 }
2963 }
2964}
2965
2966impl EventEmitter<BufferEvent> for Buffer {}
2967
2968impl Deref for Buffer {
2969 type Target = TextBuffer;
2970
2971 fn deref(&self) -> &Self::Target {
2972 &self.text
2973 }
2974}
2975
2976impl BufferSnapshot {
2977 /// Returns [`IndentSize`] for a given line that respects user settings and
2978 /// language preferences.
2979 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2980 indent_size_for_line(self, row)
2981 }
2982
2983 /// Returns [`IndentSize`] for a given position that respects user settings
2984 /// and language preferences.
2985 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2986 let settings = language_settings(
2987 self.language_at(position).map(|l| l.name()),
2988 self.file(),
2989 cx,
2990 );
2991 if settings.hard_tabs {
2992 IndentSize::tab()
2993 } else {
2994 IndentSize::spaces(settings.tab_size.get())
2995 }
2996 }
2997
2998 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2999 /// is passed in as `single_indent_size`.
3000 pub fn suggested_indents(
3001 &self,
3002 rows: impl Iterator<Item = u32>,
3003 single_indent_size: IndentSize,
3004 ) -> BTreeMap<u32, IndentSize> {
3005 let mut result = BTreeMap::new();
3006
3007 for row_range in contiguous_ranges(rows, 10) {
3008 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3009 Some(suggestions) => suggestions,
3010 _ => break,
3011 };
3012
3013 for (row, suggestion) in row_range.zip(suggestions) {
3014 let indent_size = if let Some(suggestion) = suggestion {
3015 result
3016 .get(&suggestion.basis_row)
3017 .copied()
3018 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3019 .with_delta(suggestion.delta, single_indent_size)
3020 } else {
3021 self.indent_size_for_line(row)
3022 };
3023
3024 result.insert(row, indent_size);
3025 }
3026 }
3027
3028 result
3029 }
3030
3031 fn suggest_autoindents(
3032 &self,
3033 row_range: Range<u32>,
3034 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3035 let config = &self.language.as_ref()?.config;
3036 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3037
3038 #[derive(Debug, Clone)]
3039 struct StartPosition {
3040 start: Point,
3041 suffix: SharedString,
3042 }
3043
3044 // Find the suggested indentation ranges based on the syntax tree.
3045 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3046 let end = Point::new(row_range.end, 0);
3047 let range = (start..end).to_offset(&self.text);
3048 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3049 Some(&grammar.indents_config.as_ref()?.query)
3050 });
3051 let indent_configs = matches
3052 .grammars()
3053 .iter()
3054 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3055 .collect::<Vec<_>>();
3056
3057 let mut indent_ranges = Vec::<Range<Point>>::new();
3058 let mut start_positions = Vec::<StartPosition>::new();
3059 let mut outdent_positions = Vec::<Point>::new();
3060 while let Some(mat) = matches.peek() {
3061 let mut start: Option<Point> = None;
3062 let mut end: Option<Point> = None;
3063
3064 let config = indent_configs[mat.grammar_index];
3065 for capture in mat.captures {
3066 if capture.index == config.indent_capture_ix {
3067 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3068 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3069 } else if Some(capture.index) == config.start_capture_ix {
3070 start = Some(Point::from_ts_point(capture.node.end_position()));
3071 } else if Some(capture.index) == config.end_capture_ix {
3072 end = Some(Point::from_ts_point(capture.node.start_position()));
3073 } else if Some(capture.index) == config.outdent_capture_ix {
3074 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3075 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3076 start_positions.push(StartPosition {
3077 start: Point::from_ts_point(capture.node.start_position()),
3078 suffix: suffix.clone(),
3079 });
3080 }
3081 }
3082
3083 matches.advance();
3084 if let Some((start, end)) = start.zip(end) {
3085 if start.row == end.row {
3086 continue;
3087 }
3088 let range = start..end;
3089 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3090 Err(ix) => indent_ranges.insert(ix, range),
3091 Ok(ix) => {
3092 let prev_range = &mut indent_ranges[ix];
3093 prev_range.end = prev_range.end.max(range.end);
3094 }
3095 }
3096 }
3097 }
3098
3099 let mut error_ranges = Vec::<Range<Point>>::new();
3100 let mut matches = self
3101 .syntax
3102 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3103 while let Some(mat) = matches.peek() {
3104 let node = mat.captures[0].node;
3105 let start = Point::from_ts_point(node.start_position());
3106 let end = Point::from_ts_point(node.end_position());
3107 let range = start..end;
3108 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3109 Ok(ix) | Err(ix) => ix,
3110 };
3111 let mut end_ix = ix;
3112 while let Some(existing_range) = error_ranges.get(end_ix) {
3113 if existing_range.end < end {
3114 end_ix += 1;
3115 } else {
3116 break;
3117 }
3118 }
3119 error_ranges.splice(ix..end_ix, [range]);
3120 matches.advance();
3121 }
3122
3123 outdent_positions.sort();
3124 for outdent_position in outdent_positions {
3125 // find the innermost indent range containing this outdent_position
3126 // set its end to the outdent position
3127 if let Some(range_to_truncate) = indent_ranges
3128 .iter_mut()
3129 .filter(|indent_range| indent_range.contains(&outdent_position))
3130 .next_back()
3131 {
3132 range_to_truncate.end = outdent_position;
3133 }
3134 }
3135
3136 start_positions.sort_by_key(|b| b.start);
3137
3138 // Find the suggested indentation increases and decreased based on regexes.
3139 let mut regex_outdent_map = HashMap::default();
3140 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3141 let mut start_positions_iter = start_positions.iter().peekable();
3142
3143 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3144 self.for_each_line(
3145 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3146 ..Point::new(row_range.end, 0),
3147 |row, line| {
3148 if config
3149 .decrease_indent_pattern
3150 .as_ref()
3151 .is_some_and(|regex| regex.is_match(line))
3152 {
3153 indent_change_rows.push((row, Ordering::Less));
3154 }
3155 if config
3156 .increase_indent_pattern
3157 .as_ref()
3158 .is_some_and(|regex| regex.is_match(line))
3159 {
3160 indent_change_rows.push((row + 1, Ordering::Greater));
3161 }
3162 while let Some(pos) = start_positions_iter.peek() {
3163 if pos.start.row < row {
3164 let pos = start_positions_iter.next().unwrap();
3165 last_seen_suffix
3166 .entry(pos.suffix.to_string())
3167 .or_default()
3168 .push(pos.start);
3169 } else {
3170 break;
3171 }
3172 }
3173 for rule in &config.decrease_indent_patterns {
3174 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3175 let row_start_column = self.indent_size_for_line(row).len;
3176 let basis_row = rule
3177 .valid_after
3178 .iter()
3179 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3180 .flatten()
3181 .filter(|start_point| start_point.column <= row_start_column)
3182 .max_by_key(|start_point| start_point.row);
3183 if let Some(outdent_to_row) = basis_row {
3184 regex_outdent_map.insert(row, outdent_to_row.row);
3185 }
3186 break;
3187 }
3188 }
3189 },
3190 );
3191
3192 let mut indent_changes = indent_change_rows.into_iter().peekable();
3193 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3194 prev_non_blank_row.unwrap_or(0)
3195 } else {
3196 row_range.start.saturating_sub(1)
3197 };
3198
3199 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3200 Some(row_range.map(move |row| {
3201 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3202
3203 let mut indent_from_prev_row = false;
3204 let mut outdent_from_prev_row = false;
3205 let mut outdent_to_row = u32::MAX;
3206 let mut from_regex = false;
3207
3208 while let Some((indent_row, delta)) = indent_changes.peek() {
3209 match indent_row.cmp(&row) {
3210 Ordering::Equal => match delta {
3211 Ordering::Less => {
3212 from_regex = true;
3213 outdent_from_prev_row = true
3214 }
3215 Ordering::Greater => {
3216 indent_from_prev_row = true;
3217 from_regex = true
3218 }
3219 _ => {}
3220 },
3221
3222 Ordering::Greater => break,
3223 Ordering::Less => {}
3224 }
3225
3226 indent_changes.next();
3227 }
3228
3229 for range in &indent_ranges {
3230 if range.start.row >= row {
3231 break;
3232 }
3233 if range.start.row == prev_row && range.end > row_start {
3234 indent_from_prev_row = true;
3235 }
3236 if range.end > prev_row_start && range.end <= row_start {
3237 outdent_to_row = outdent_to_row.min(range.start.row);
3238 }
3239 }
3240
3241 if let Some(basis_row) = regex_outdent_map.get(&row) {
3242 indent_from_prev_row = false;
3243 outdent_to_row = *basis_row;
3244 from_regex = true;
3245 }
3246
3247 let within_error = error_ranges
3248 .iter()
3249 .any(|e| e.start.row < row && e.end > row_start);
3250
3251 let suggestion = if outdent_to_row == prev_row
3252 || (outdent_from_prev_row && indent_from_prev_row)
3253 {
3254 Some(IndentSuggestion {
3255 basis_row: prev_row,
3256 delta: Ordering::Equal,
3257 within_error: within_error && !from_regex,
3258 })
3259 } else if indent_from_prev_row {
3260 Some(IndentSuggestion {
3261 basis_row: prev_row,
3262 delta: Ordering::Greater,
3263 within_error: within_error && !from_regex,
3264 })
3265 } else if outdent_to_row < prev_row {
3266 Some(IndentSuggestion {
3267 basis_row: outdent_to_row,
3268 delta: Ordering::Equal,
3269 within_error: within_error && !from_regex,
3270 })
3271 } else if outdent_from_prev_row {
3272 Some(IndentSuggestion {
3273 basis_row: prev_row,
3274 delta: Ordering::Less,
3275 within_error: within_error && !from_regex,
3276 })
3277 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3278 {
3279 Some(IndentSuggestion {
3280 basis_row: prev_row,
3281 delta: Ordering::Equal,
3282 within_error: within_error && !from_regex,
3283 })
3284 } else {
3285 None
3286 };
3287
3288 prev_row = row;
3289 prev_row_start = row_start;
3290 suggestion
3291 }))
3292 }
3293
3294 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3295 while row > 0 {
3296 row -= 1;
3297 if !self.is_line_blank(row) {
3298 return Some(row);
3299 }
3300 }
3301 None
3302 }
3303
3304 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3305 let captures = self.syntax.captures(range, &self.text, |grammar| {
3306 grammar
3307 .highlights_config
3308 .as_ref()
3309 .map(|config| &config.query)
3310 });
3311 let highlight_maps = captures
3312 .grammars()
3313 .iter()
3314 .map(|grammar| grammar.highlight_map())
3315 .collect();
3316 (captures, highlight_maps)
3317 }
3318
3319 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3320 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3321 /// returned in chunks where each chunk has a single syntax highlighting style and
3322 /// diagnostic status.
3323 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3324 let range = range.start.to_offset(self)..range.end.to_offset(self);
3325
3326 let mut syntax = None;
3327 if language_aware {
3328 syntax = Some(self.get_highlights(range.clone()));
3329 }
3330 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3331 let diagnostics = language_aware;
3332 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3333 }
3334
3335 pub fn highlighted_text_for_range<T: ToOffset>(
3336 &self,
3337 range: Range<T>,
3338 override_style: Option<HighlightStyle>,
3339 syntax_theme: &SyntaxTheme,
3340 ) -> HighlightedText {
3341 HighlightedText::from_buffer_range(
3342 range,
3343 &self.text,
3344 &self.syntax,
3345 override_style,
3346 syntax_theme,
3347 )
3348 }
3349
3350 /// Invokes the given callback for each line of text in the given range of the buffer.
3351 /// Uses callback to avoid allocating a string for each line.
3352 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3353 let mut line = String::new();
3354 let mut row = range.start.row;
3355 for chunk in self
3356 .as_rope()
3357 .chunks_in_range(range.to_offset(self))
3358 .chain(["\n"])
3359 {
3360 for (newline_ix, text) in chunk.split('\n').enumerate() {
3361 if newline_ix > 0 {
3362 callback(row, &line);
3363 row += 1;
3364 line.clear();
3365 }
3366 line.push_str(text);
3367 }
3368 }
3369 }
3370
3371 /// Iterates over every [`SyntaxLayer`] in the buffer.
3372 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3373 self.syntax_layers_for_range(0..self.len(), true)
3374 }
3375
3376 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3377 let offset = position.to_offset(self);
3378 self.syntax_layers_for_range(offset..offset, false)
3379 .filter(|l| l.node().end_byte() > offset)
3380 .last()
3381 }
3382
3383 pub fn syntax_layers_for_range<D: ToOffset>(
3384 &self,
3385 range: Range<D>,
3386 include_hidden: bool,
3387 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3388 self.syntax
3389 .layers_for_range(range, &self.text, include_hidden)
3390 }
3391
3392 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3393 &self,
3394 range: Range<D>,
3395 ) -> Option<SyntaxLayer<'_>> {
3396 let range = range.to_offset(self);
3397 self.syntax
3398 .layers_for_range(range, &self.text, false)
3399 .max_by(|a, b| {
3400 if a.depth != b.depth {
3401 a.depth.cmp(&b.depth)
3402 } else if a.offset.0 != b.offset.0 {
3403 a.offset.0.cmp(&b.offset.0)
3404 } else {
3405 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3406 }
3407 })
3408 }
3409
3410 /// Returns the main [`Language`].
3411 pub fn language(&self) -> Option<&Arc<Language>> {
3412 self.language.as_ref()
3413 }
3414
3415 /// Returns the [`Language`] at the given location.
3416 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3417 self.syntax_layer_at(position)
3418 .map(|info| info.language)
3419 .or(self.language.as_ref())
3420 }
3421
3422 /// Returns the settings for the language at the given location.
3423 pub fn settings_at<'a, D: ToOffset>(
3424 &'a self,
3425 position: D,
3426 cx: &'a App,
3427 ) -> Cow<'a, LanguageSettings> {
3428 language_settings(
3429 self.language_at(position).map(|l| l.name()),
3430 self.file.as_ref(),
3431 cx,
3432 )
3433 }
3434
3435 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3436 CharClassifier::new(self.language_scope_at(point))
3437 }
3438
3439 /// Returns the [`LanguageScope`] at the given location.
3440 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3441 let offset = position.to_offset(self);
3442 let mut scope = None;
3443 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3444
3445 // Use the layer that has the smallest node intersecting the given point.
3446 for layer in self
3447 .syntax
3448 .layers_for_range(offset..offset, &self.text, false)
3449 {
3450 let mut cursor = layer.node().walk();
3451
3452 let mut range = None;
3453 loop {
3454 let child_range = cursor.node().byte_range();
3455 if !child_range.contains(&offset) {
3456 break;
3457 }
3458
3459 range = Some(child_range);
3460 if cursor.goto_first_child_for_byte(offset).is_none() {
3461 break;
3462 }
3463 }
3464
3465 if let Some(range) = range
3466 && smallest_range_and_depth.as_ref().is_none_or(
3467 |(smallest_range, smallest_range_depth)| {
3468 if layer.depth > *smallest_range_depth {
3469 true
3470 } else if layer.depth == *smallest_range_depth {
3471 range.len() < smallest_range.len()
3472 } else {
3473 false
3474 }
3475 },
3476 )
3477 {
3478 smallest_range_and_depth = Some((range, layer.depth));
3479 scope = Some(LanguageScope {
3480 language: layer.language.clone(),
3481 override_id: layer.override_id(offset, &self.text),
3482 });
3483 }
3484 }
3485
3486 scope.or_else(|| {
3487 self.language.clone().map(|language| LanguageScope {
3488 language,
3489 override_id: None,
3490 })
3491 })
3492 }
3493
3494 /// Returns a tuple of the range and character kind of the word
3495 /// surrounding the given position.
3496 pub fn surrounding_word<T: ToOffset>(
3497 &self,
3498 start: T,
3499 scope_context: Option<CharScopeContext>,
3500 ) -> (Range<usize>, Option<CharKind>) {
3501 let mut start = start.to_offset(self);
3502 let mut end = start;
3503 let mut next_chars = self.chars_at(start).take(128).peekable();
3504 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3505
3506 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3507 let word_kind = cmp::max(
3508 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3509 next_chars.peek().copied().map(|c| classifier.kind(c)),
3510 );
3511
3512 for ch in prev_chars {
3513 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3514 start -= ch.len_utf8();
3515 } else {
3516 break;
3517 }
3518 }
3519
3520 for ch in next_chars {
3521 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3522 end += ch.len_utf8();
3523 } else {
3524 break;
3525 }
3526 }
3527
3528 (start..end, word_kind)
3529 }
3530
3531 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3532 /// range. When `require_larger` is true, the node found must be larger than the query range.
3533 ///
3534 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3535 /// be moved to the root of the tree.
3536 fn goto_node_enclosing_range(
3537 cursor: &mut tree_sitter::TreeCursor,
3538 query_range: &Range<usize>,
3539 require_larger: bool,
3540 ) -> bool {
3541 let mut ascending = false;
3542 loop {
3543 let mut range = cursor.node().byte_range();
3544 if query_range.is_empty() {
3545 // When the query range is empty and the current node starts after it, move to the
3546 // previous sibling to find the node the containing node.
3547 if range.start > query_range.start {
3548 cursor.goto_previous_sibling();
3549 range = cursor.node().byte_range();
3550 }
3551 } else {
3552 // When the query range is non-empty and the current node ends exactly at the start,
3553 // move to the next sibling to find a node that extends beyond the start.
3554 if range.end == query_range.start {
3555 cursor.goto_next_sibling();
3556 range = cursor.node().byte_range();
3557 }
3558 }
3559
3560 let encloses = range.contains_inclusive(query_range)
3561 && (!require_larger || range.len() > query_range.len());
3562 if !encloses {
3563 ascending = true;
3564 if !cursor.goto_parent() {
3565 return false;
3566 }
3567 continue;
3568 } else if ascending {
3569 return true;
3570 }
3571
3572 // Descend into the current node.
3573 if cursor
3574 .goto_first_child_for_byte(query_range.start)
3575 .is_none()
3576 {
3577 return true;
3578 }
3579 }
3580 }
3581
3582 pub fn syntax_ancestor<'a, T: ToOffset>(
3583 &'a self,
3584 range: Range<T>,
3585 ) -> Option<tree_sitter::Node<'a>> {
3586 let range = range.start.to_offset(self)..range.end.to_offset(self);
3587 let mut result: Option<tree_sitter::Node<'a>> = None;
3588 for layer in self
3589 .syntax
3590 .layers_for_range(range.clone(), &self.text, true)
3591 {
3592 let mut cursor = layer.node().walk();
3593
3594 // Find the node that both contains the range and is larger than it.
3595 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3596 continue;
3597 }
3598
3599 let left_node = cursor.node();
3600 let mut layer_result = left_node;
3601
3602 // For an empty range, try to find another node immediately to the right of the range.
3603 if left_node.end_byte() == range.start {
3604 let mut right_node = None;
3605 while !cursor.goto_next_sibling() {
3606 if !cursor.goto_parent() {
3607 break;
3608 }
3609 }
3610
3611 while cursor.node().start_byte() == range.start {
3612 right_node = Some(cursor.node());
3613 if !cursor.goto_first_child() {
3614 break;
3615 }
3616 }
3617
3618 // If there is a candidate node on both sides of the (empty) range, then
3619 // decide between the two by favoring a named node over an anonymous token.
3620 // If both nodes are the same in that regard, favor the right one.
3621 if let Some(right_node) = right_node
3622 && (right_node.is_named() || !left_node.is_named())
3623 {
3624 layer_result = right_node;
3625 }
3626 }
3627
3628 if let Some(previous_result) = &result
3629 && previous_result.byte_range().len() < layer_result.byte_range().len()
3630 {
3631 continue;
3632 }
3633 result = Some(layer_result);
3634 }
3635
3636 result
3637 }
3638
3639 /// Find the previous sibling syntax node at the given range.
3640 ///
3641 /// This function locates the syntax node that precedes the node containing
3642 /// the given range. It searches hierarchically by:
3643 /// 1. Finding the node that contains the given range
3644 /// 2. Looking for the previous sibling at the same tree level
3645 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3646 ///
3647 /// Returns `None` if there is no previous sibling at any ancestor level.
3648 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3649 &'a self,
3650 range: Range<T>,
3651 ) -> Option<tree_sitter::Node<'a>> {
3652 let range = range.start.to_offset(self)..range.end.to_offset(self);
3653 let mut result: Option<tree_sitter::Node<'a>> = None;
3654
3655 for layer in self
3656 .syntax
3657 .layers_for_range(range.clone(), &self.text, true)
3658 {
3659 let mut cursor = layer.node().walk();
3660
3661 // Find the node that contains the range
3662 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3663 continue;
3664 }
3665
3666 // Look for the previous sibling, moving up ancestor levels if needed
3667 loop {
3668 if cursor.goto_previous_sibling() {
3669 let layer_result = cursor.node();
3670
3671 if let Some(previous_result) = &result {
3672 if previous_result.byte_range().end < layer_result.byte_range().end {
3673 continue;
3674 }
3675 }
3676 result = Some(layer_result);
3677 break;
3678 }
3679
3680 // No sibling found at this level, try moving up to parent
3681 if !cursor.goto_parent() {
3682 break;
3683 }
3684 }
3685 }
3686
3687 result
3688 }
3689
3690 /// Find the next sibling syntax node at the given range.
3691 ///
3692 /// This function locates the syntax node that follows the node containing
3693 /// the given range. It searches hierarchically by:
3694 /// 1. Finding the node that contains the given range
3695 /// 2. Looking for the next sibling at the same tree level
3696 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3697 ///
3698 /// Returns `None` if there is no next sibling at any ancestor level.
3699 pub fn syntax_next_sibling<'a, T: ToOffset>(
3700 &'a self,
3701 range: Range<T>,
3702 ) -> Option<tree_sitter::Node<'a>> {
3703 let range = range.start.to_offset(self)..range.end.to_offset(self);
3704 let mut result: Option<tree_sitter::Node<'a>> = None;
3705
3706 for layer in self
3707 .syntax
3708 .layers_for_range(range.clone(), &self.text, true)
3709 {
3710 let mut cursor = layer.node().walk();
3711
3712 // Find the node that contains the range
3713 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3714 continue;
3715 }
3716
3717 // Look for the next sibling, moving up ancestor levels if needed
3718 loop {
3719 if cursor.goto_next_sibling() {
3720 let layer_result = cursor.node();
3721
3722 if let Some(previous_result) = &result {
3723 if previous_result.byte_range().start > layer_result.byte_range().start {
3724 continue;
3725 }
3726 }
3727 result = Some(layer_result);
3728 break;
3729 }
3730
3731 // No sibling found at this level, try moving up to parent
3732 if !cursor.goto_parent() {
3733 break;
3734 }
3735 }
3736 }
3737
3738 result
3739 }
3740
3741 /// Returns the root syntax node within the given row
3742 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3743 let start_offset = position.to_offset(self);
3744
3745 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3746
3747 let layer = self
3748 .syntax
3749 .layers_for_range(start_offset..start_offset, &self.text, true)
3750 .next()?;
3751
3752 let mut cursor = layer.node().walk();
3753
3754 // Descend to the first leaf that touches the start of the range.
3755 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3756 if cursor.node().end_byte() == start_offset {
3757 cursor.goto_next_sibling();
3758 }
3759 }
3760
3761 // Ascend to the root node within the same row.
3762 while cursor.goto_parent() {
3763 if cursor.node().start_position().row != row {
3764 break;
3765 }
3766 }
3767
3768 Some(cursor.node())
3769 }
3770
3771 /// Returns the outline for the buffer.
3772 ///
3773 /// This method allows passing an optional [`SyntaxTheme`] to
3774 /// syntax-highlight the returned symbols.
3775 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3776 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3777 }
3778
3779 /// Returns all the symbols that contain the given position.
3780 ///
3781 /// This method allows passing an optional [`SyntaxTheme`] to
3782 /// syntax-highlight the returned symbols.
3783 pub fn symbols_containing<T: ToOffset>(
3784 &self,
3785 position: T,
3786 theme: Option<&SyntaxTheme>,
3787 ) -> Vec<OutlineItem<Anchor>> {
3788 let position = position.to_offset(self);
3789 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3790 let end = self.clip_offset(position + 1, Bias::Right);
3791 let mut items = self.outline_items_containing(start..end, false, theme);
3792 let mut prev_depth = None;
3793 items.retain(|item| {
3794 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3795 prev_depth = Some(item.depth);
3796 result
3797 });
3798 items
3799 }
3800
3801 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3802 let range = range.to_offset(self);
3803 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3804 grammar.outline_config.as_ref().map(|c| &c.query)
3805 });
3806 let configs = matches
3807 .grammars()
3808 .iter()
3809 .map(|g| g.outline_config.as_ref().unwrap())
3810 .collect::<Vec<_>>();
3811
3812 while let Some(mat) = matches.peek() {
3813 let config = &configs[mat.grammar_index];
3814 let containing_item_node = maybe!({
3815 let item_node = mat.captures.iter().find_map(|cap| {
3816 if cap.index == config.item_capture_ix {
3817 Some(cap.node)
3818 } else {
3819 None
3820 }
3821 })?;
3822
3823 let item_byte_range = item_node.byte_range();
3824 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3825 None
3826 } else {
3827 Some(item_node)
3828 }
3829 });
3830
3831 if let Some(item_node) = containing_item_node {
3832 return Some(
3833 Point::from_ts_point(item_node.start_position())
3834 ..Point::from_ts_point(item_node.end_position()),
3835 );
3836 }
3837
3838 matches.advance();
3839 }
3840 None
3841 }
3842
3843 pub fn outline_items_containing<T: ToOffset>(
3844 &self,
3845 range: Range<T>,
3846 include_extra_context: bool,
3847 theme: Option<&SyntaxTheme>,
3848 ) -> Vec<OutlineItem<Anchor>> {
3849 self.outline_items_containing_internal(
3850 range,
3851 include_extra_context,
3852 theme,
3853 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3854 )
3855 }
3856
3857 pub fn outline_items_as_points_containing<T: ToOffset>(
3858 &self,
3859 range: Range<T>,
3860 include_extra_context: bool,
3861 theme: Option<&SyntaxTheme>,
3862 ) -> Vec<OutlineItem<Point>> {
3863 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3864 range
3865 })
3866 }
3867
3868 fn outline_items_containing_internal<T: ToOffset, U>(
3869 &self,
3870 range: Range<T>,
3871 include_extra_context: bool,
3872 theme: Option<&SyntaxTheme>,
3873 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3874 ) -> Vec<OutlineItem<U>> {
3875 let range = range.to_offset(self);
3876 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3877 grammar.outline_config.as_ref().map(|c| &c.query)
3878 });
3879
3880 let mut items = Vec::new();
3881 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3882 while let Some(mat) = matches.peek() {
3883 let config = matches.grammars()[mat.grammar_index]
3884 .outline_config
3885 .as_ref()
3886 .unwrap();
3887 if let Some(item) =
3888 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3889 {
3890 items.push(item);
3891 } else if let Some(capture) = mat
3892 .captures
3893 .iter()
3894 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3895 {
3896 let capture_range = capture.node.start_position()..capture.node.end_position();
3897 let mut capture_row_range =
3898 capture_range.start.row as u32..capture_range.end.row as u32;
3899 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3900 {
3901 capture_row_range.end -= 1;
3902 }
3903 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3904 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3905 last_row_range.end = capture_row_range.end;
3906 } else {
3907 annotation_row_ranges.push(capture_row_range);
3908 }
3909 } else {
3910 annotation_row_ranges.push(capture_row_range);
3911 }
3912 }
3913 matches.advance();
3914 }
3915
3916 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3917
3918 // Assign depths based on containment relationships and convert to anchors.
3919 let mut item_ends_stack = Vec::<Point>::new();
3920 let mut anchor_items = Vec::new();
3921 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3922 for item in items {
3923 while let Some(last_end) = item_ends_stack.last().copied() {
3924 if last_end < item.range.end {
3925 item_ends_stack.pop();
3926 } else {
3927 break;
3928 }
3929 }
3930
3931 let mut annotation_row_range = None;
3932 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3933 let row_preceding_item = item.range.start.row.saturating_sub(1);
3934 if next_annotation_row_range.end < row_preceding_item {
3935 annotation_row_ranges.next();
3936 } else {
3937 if next_annotation_row_range.end == row_preceding_item {
3938 annotation_row_range = Some(next_annotation_row_range.clone());
3939 annotation_row_ranges.next();
3940 }
3941 break;
3942 }
3943 }
3944
3945 anchor_items.push(OutlineItem {
3946 depth: item_ends_stack.len(),
3947 range: range_callback(self, item.range.clone()),
3948 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3949 text: item.text,
3950 highlight_ranges: item.highlight_ranges,
3951 name_ranges: item.name_ranges,
3952 body_range: item.body_range.map(|r| range_callback(self, r)),
3953 annotation_range: annotation_row_range.map(|annotation_range| {
3954 let point_range = Point::new(annotation_range.start, 0)
3955 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3956 range_callback(self, point_range)
3957 }),
3958 });
3959 item_ends_stack.push(item.range.end);
3960 }
3961
3962 anchor_items
3963 }
3964
3965 fn next_outline_item(
3966 &self,
3967 config: &OutlineConfig,
3968 mat: &SyntaxMapMatch,
3969 range: &Range<usize>,
3970 include_extra_context: bool,
3971 theme: Option<&SyntaxTheme>,
3972 ) -> Option<OutlineItem<Point>> {
3973 let item_node = mat.captures.iter().find_map(|cap| {
3974 if cap.index == config.item_capture_ix {
3975 Some(cap.node)
3976 } else {
3977 None
3978 }
3979 })?;
3980
3981 let item_byte_range = item_node.byte_range();
3982 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3983 return None;
3984 }
3985 let item_point_range = Point::from_ts_point(item_node.start_position())
3986 ..Point::from_ts_point(item_node.end_position());
3987
3988 let mut open_point = None;
3989 let mut close_point = None;
3990
3991 let mut buffer_ranges = Vec::new();
3992 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3993 let mut range = node.start_byte()..node.end_byte();
3994 let start = node.start_position();
3995 if node.end_position().row > start.row {
3996 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3997 }
3998
3999 if !range.is_empty() {
4000 buffer_ranges.push((range, node_is_name));
4001 }
4002 };
4003
4004 for capture in mat.captures {
4005 if capture.index == config.name_capture_ix {
4006 add_to_buffer_ranges(capture.node, true);
4007 } else if Some(capture.index) == config.context_capture_ix
4008 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4009 {
4010 add_to_buffer_ranges(capture.node, false);
4011 } else {
4012 if Some(capture.index) == config.open_capture_ix {
4013 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4014 } else if Some(capture.index) == config.close_capture_ix {
4015 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4016 }
4017 }
4018 }
4019
4020 if buffer_ranges.is_empty() {
4021 return None;
4022 }
4023 let source_range_for_text =
4024 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4025
4026 let mut text = String::new();
4027 let mut highlight_ranges = Vec::new();
4028 let mut name_ranges = Vec::new();
4029 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4030 let mut last_buffer_range_end = 0;
4031 for (buffer_range, is_name) in buffer_ranges {
4032 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4033 if space_added {
4034 text.push(' ');
4035 }
4036 let before_append_len = text.len();
4037 let mut offset = buffer_range.start;
4038 chunks.seek(buffer_range.clone());
4039 for mut chunk in chunks.by_ref() {
4040 if chunk.text.len() > buffer_range.end - offset {
4041 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4042 offset = buffer_range.end;
4043 } else {
4044 offset += chunk.text.len();
4045 }
4046 let style = chunk
4047 .syntax_highlight_id
4048 .zip(theme)
4049 .and_then(|(highlight, theme)| highlight.style(theme));
4050 if let Some(style) = style {
4051 let start = text.len();
4052 let end = start + chunk.text.len();
4053 highlight_ranges.push((start..end, style));
4054 }
4055 text.push_str(chunk.text);
4056 if offset >= buffer_range.end {
4057 break;
4058 }
4059 }
4060 if is_name {
4061 let after_append_len = text.len();
4062 let start = if space_added && !name_ranges.is_empty() {
4063 before_append_len - 1
4064 } else {
4065 before_append_len
4066 };
4067 name_ranges.push(start..after_append_len);
4068 }
4069 last_buffer_range_end = buffer_range.end;
4070 }
4071
4072 Some(OutlineItem {
4073 depth: 0, // We'll calculate the depth later
4074 range: item_point_range,
4075 source_range_for_text: source_range_for_text.to_point(self),
4076 text,
4077 highlight_ranges,
4078 name_ranges,
4079 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4080 annotation_range: None,
4081 })
4082 }
4083
4084 pub fn function_body_fold_ranges<T: ToOffset>(
4085 &self,
4086 within: Range<T>,
4087 ) -> impl Iterator<Item = Range<usize>> + '_ {
4088 self.text_object_ranges(within, TreeSitterOptions::default())
4089 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4090 }
4091
4092 /// For each grammar in the language, runs the provided
4093 /// [`tree_sitter::Query`] against the given range.
4094 pub fn matches(
4095 &self,
4096 range: Range<usize>,
4097 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4098 ) -> SyntaxMapMatches<'_> {
4099 self.syntax.matches(range, self, query)
4100 }
4101
4102 pub fn all_bracket_ranges(
4103 &self,
4104 range: Range<usize>,
4105 ) -> impl Iterator<Item = BracketMatch> + '_ {
4106 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4107 grammar.brackets_config.as_ref().map(|c| &c.query)
4108 });
4109 let configs = matches
4110 .grammars()
4111 .iter()
4112 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4113 .collect::<Vec<_>>();
4114
4115 iter::from_fn(move || {
4116 while let Some(mat) = matches.peek() {
4117 let mut open = None;
4118 let mut close = None;
4119 let config = &configs[mat.grammar_index];
4120 let pattern = &config.patterns[mat.pattern_index];
4121 for capture in mat.captures {
4122 if capture.index == config.open_capture_ix {
4123 open = Some(capture.node.byte_range());
4124 } else if capture.index == config.close_capture_ix {
4125 close = Some(capture.node.byte_range());
4126 }
4127 }
4128
4129 matches.advance();
4130
4131 let Some((open_range, close_range)) = open.zip(close) else {
4132 continue;
4133 };
4134
4135 let bracket_range = open_range.start..=close_range.end;
4136 if !bracket_range.overlaps(&range) {
4137 continue;
4138 }
4139
4140 return Some(BracketMatch {
4141 open_range,
4142 close_range,
4143 newline_only: pattern.newline_only,
4144 });
4145 }
4146 None
4147 })
4148 }
4149
4150 /// Returns bracket range pairs overlapping or adjacent to `range`
4151 pub fn bracket_ranges<T: ToOffset>(
4152 &self,
4153 range: Range<T>,
4154 ) -> impl Iterator<Item = BracketMatch> + '_ {
4155 // Find bracket pairs that *inclusively* contain the given range.
4156 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4157 self.all_bracket_ranges(range)
4158 .filter(|pair| !pair.newline_only)
4159 }
4160
4161 pub fn debug_variables_query<T: ToOffset>(
4162 &self,
4163 range: Range<T>,
4164 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4165 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4166
4167 let mut matches = self.syntax.matches_with_options(
4168 range.clone(),
4169 &self.text,
4170 TreeSitterOptions::default(),
4171 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4172 );
4173
4174 let configs = matches
4175 .grammars()
4176 .iter()
4177 .map(|grammar| grammar.debug_variables_config.as_ref())
4178 .collect::<Vec<_>>();
4179
4180 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4181
4182 iter::from_fn(move || {
4183 loop {
4184 while let Some(capture) = captures.pop() {
4185 if capture.0.overlaps(&range) {
4186 return Some(capture);
4187 }
4188 }
4189
4190 let mat = matches.peek()?;
4191
4192 let Some(config) = configs[mat.grammar_index].as_ref() else {
4193 matches.advance();
4194 continue;
4195 };
4196
4197 for capture in mat.captures {
4198 let Some(ix) = config
4199 .objects_by_capture_ix
4200 .binary_search_by_key(&capture.index, |e| e.0)
4201 .ok()
4202 else {
4203 continue;
4204 };
4205 let text_object = config.objects_by_capture_ix[ix].1;
4206 let byte_range = capture.node.byte_range();
4207
4208 let mut found = false;
4209 for (range, existing) in captures.iter_mut() {
4210 if existing == &text_object {
4211 range.start = range.start.min(byte_range.start);
4212 range.end = range.end.max(byte_range.end);
4213 found = true;
4214 break;
4215 }
4216 }
4217
4218 if !found {
4219 captures.push((byte_range, text_object));
4220 }
4221 }
4222
4223 matches.advance();
4224 }
4225 })
4226 }
4227
4228 pub fn text_object_ranges<T: ToOffset>(
4229 &self,
4230 range: Range<T>,
4231 options: TreeSitterOptions,
4232 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4233 let range =
4234 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4235
4236 let mut matches =
4237 self.syntax
4238 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4239 grammar.text_object_config.as_ref().map(|c| &c.query)
4240 });
4241
4242 let configs = matches
4243 .grammars()
4244 .iter()
4245 .map(|grammar| grammar.text_object_config.as_ref())
4246 .collect::<Vec<_>>();
4247
4248 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4249
4250 iter::from_fn(move || {
4251 loop {
4252 while let Some(capture) = captures.pop() {
4253 if capture.0.overlaps(&range) {
4254 return Some(capture);
4255 }
4256 }
4257
4258 let mat = matches.peek()?;
4259
4260 let Some(config) = configs[mat.grammar_index].as_ref() else {
4261 matches.advance();
4262 continue;
4263 };
4264
4265 for capture in mat.captures {
4266 let Some(ix) = config
4267 .text_objects_by_capture_ix
4268 .binary_search_by_key(&capture.index, |e| e.0)
4269 .ok()
4270 else {
4271 continue;
4272 };
4273 let text_object = config.text_objects_by_capture_ix[ix].1;
4274 let byte_range = capture.node.byte_range();
4275
4276 let mut found = false;
4277 for (range, existing) in captures.iter_mut() {
4278 if existing == &text_object {
4279 range.start = range.start.min(byte_range.start);
4280 range.end = range.end.max(byte_range.end);
4281 found = true;
4282 break;
4283 }
4284 }
4285
4286 if !found {
4287 captures.push((byte_range, text_object));
4288 }
4289 }
4290
4291 matches.advance();
4292 }
4293 })
4294 }
4295
4296 /// Returns enclosing bracket ranges containing the given range
4297 pub fn enclosing_bracket_ranges<T: ToOffset>(
4298 &self,
4299 range: Range<T>,
4300 ) -> impl Iterator<Item = BracketMatch> + '_ {
4301 let range = range.start.to_offset(self)..range.end.to_offset(self);
4302
4303 self.bracket_ranges(range.clone()).filter(move |pair| {
4304 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4305 })
4306 }
4307
4308 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4309 ///
4310 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4311 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4312 &self,
4313 range: Range<T>,
4314 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4315 ) -> Option<(Range<usize>, Range<usize>)> {
4316 let range = range.start.to_offset(self)..range.end.to_offset(self);
4317
4318 // Get the ranges of the innermost pair of brackets.
4319 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4320
4321 for pair in self.enclosing_bracket_ranges(range) {
4322 if let Some(range_filter) = range_filter
4323 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4324 {
4325 continue;
4326 }
4327
4328 let len = pair.close_range.end - pair.open_range.start;
4329
4330 if let Some((existing_open, existing_close)) = &result {
4331 let existing_len = existing_close.end - existing_open.start;
4332 if len > existing_len {
4333 continue;
4334 }
4335 }
4336
4337 result = Some((pair.open_range, pair.close_range));
4338 }
4339
4340 result
4341 }
4342
4343 /// Returns anchor ranges for any matches of the redaction query.
4344 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4345 /// will be run on the relevant section of the buffer.
4346 pub fn redacted_ranges<T: ToOffset>(
4347 &self,
4348 range: Range<T>,
4349 ) -> impl Iterator<Item = Range<usize>> + '_ {
4350 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4351 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4352 grammar
4353 .redactions_config
4354 .as_ref()
4355 .map(|config| &config.query)
4356 });
4357
4358 let configs = syntax_matches
4359 .grammars()
4360 .iter()
4361 .map(|grammar| grammar.redactions_config.as_ref())
4362 .collect::<Vec<_>>();
4363
4364 iter::from_fn(move || {
4365 let redacted_range = syntax_matches
4366 .peek()
4367 .and_then(|mat| {
4368 configs[mat.grammar_index].and_then(|config| {
4369 mat.captures
4370 .iter()
4371 .find(|capture| capture.index == config.redaction_capture_ix)
4372 })
4373 })
4374 .map(|mat| mat.node.byte_range());
4375 syntax_matches.advance();
4376 redacted_range
4377 })
4378 }
4379
4380 pub fn injections_intersecting_range<T: ToOffset>(
4381 &self,
4382 range: Range<T>,
4383 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4384 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4385
4386 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4387 grammar
4388 .injection_config
4389 .as_ref()
4390 .map(|config| &config.query)
4391 });
4392
4393 let configs = syntax_matches
4394 .grammars()
4395 .iter()
4396 .map(|grammar| grammar.injection_config.as_ref())
4397 .collect::<Vec<_>>();
4398
4399 iter::from_fn(move || {
4400 let ranges = syntax_matches.peek().and_then(|mat| {
4401 let config = &configs[mat.grammar_index]?;
4402 let content_capture_range = mat.captures.iter().find_map(|capture| {
4403 if capture.index == config.content_capture_ix {
4404 Some(capture.node.byte_range())
4405 } else {
4406 None
4407 }
4408 })?;
4409 let language = self.language_at(content_capture_range.start)?;
4410 Some((content_capture_range, language))
4411 });
4412 syntax_matches.advance();
4413 ranges
4414 })
4415 }
4416
4417 pub fn runnable_ranges(
4418 &self,
4419 offset_range: Range<usize>,
4420 ) -> impl Iterator<Item = RunnableRange> + '_ {
4421 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4422 grammar.runnable_config.as_ref().map(|config| &config.query)
4423 });
4424
4425 let test_configs = syntax_matches
4426 .grammars()
4427 .iter()
4428 .map(|grammar| grammar.runnable_config.as_ref())
4429 .collect::<Vec<_>>();
4430
4431 iter::from_fn(move || {
4432 loop {
4433 let mat = syntax_matches.peek()?;
4434
4435 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4436 let mut run_range = None;
4437 let full_range = mat.captures.iter().fold(
4438 Range {
4439 start: usize::MAX,
4440 end: 0,
4441 },
4442 |mut acc, next| {
4443 let byte_range = next.node.byte_range();
4444 if acc.start > byte_range.start {
4445 acc.start = byte_range.start;
4446 }
4447 if acc.end < byte_range.end {
4448 acc.end = byte_range.end;
4449 }
4450 acc
4451 },
4452 );
4453 if full_range.start > full_range.end {
4454 // We did not find a full spanning range of this match.
4455 return None;
4456 }
4457 let extra_captures: SmallVec<[_; 1]> =
4458 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4459 test_configs
4460 .extra_captures
4461 .get(capture.index as usize)
4462 .cloned()
4463 .and_then(|tag_name| match tag_name {
4464 RunnableCapture::Named(name) => {
4465 Some((capture.node.byte_range(), name))
4466 }
4467 RunnableCapture::Run => {
4468 let _ = run_range.insert(capture.node.byte_range());
4469 None
4470 }
4471 })
4472 }));
4473 let run_range = run_range?;
4474 let tags = test_configs
4475 .query
4476 .property_settings(mat.pattern_index)
4477 .iter()
4478 .filter_map(|property| {
4479 if *property.key == *"tag" {
4480 property
4481 .value
4482 .as_ref()
4483 .map(|value| RunnableTag(value.to_string().into()))
4484 } else {
4485 None
4486 }
4487 })
4488 .collect();
4489 let extra_captures = extra_captures
4490 .into_iter()
4491 .map(|(range, name)| {
4492 (
4493 name.to_string(),
4494 self.text_for_range(range).collect::<String>(),
4495 )
4496 })
4497 .collect();
4498 // All tags should have the same range.
4499 Some(RunnableRange {
4500 run_range,
4501 full_range,
4502 runnable: Runnable {
4503 tags,
4504 language: mat.language,
4505 buffer: self.remote_id(),
4506 },
4507 extra_captures,
4508 buffer_id: self.remote_id(),
4509 })
4510 });
4511
4512 syntax_matches.advance();
4513 if test_range.is_some() {
4514 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4515 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4516 return test_range;
4517 }
4518 }
4519 })
4520 }
4521
4522 /// Returns selections for remote peers intersecting the given range.
4523 #[allow(clippy::type_complexity)]
4524 pub fn selections_in_range(
4525 &self,
4526 range: Range<Anchor>,
4527 include_local: bool,
4528 ) -> impl Iterator<
4529 Item = (
4530 ReplicaId,
4531 bool,
4532 CursorShape,
4533 impl Iterator<Item = &Selection<Anchor>> + '_,
4534 ),
4535 > + '_ {
4536 self.remote_selections
4537 .iter()
4538 .filter(move |(replica_id, set)| {
4539 (include_local || **replica_id != self.text.replica_id())
4540 && !set.selections.is_empty()
4541 })
4542 .map(move |(replica_id, set)| {
4543 let start_ix = match set.selections.binary_search_by(|probe| {
4544 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4545 }) {
4546 Ok(ix) | Err(ix) => ix,
4547 };
4548 let end_ix = match set.selections.binary_search_by(|probe| {
4549 probe.start.cmp(&range.end, self).then(Ordering::Less)
4550 }) {
4551 Ok(ix) | Err(ix) => ix,
4552 };
4553
4554 (
4555 *replica_id,
4556 set.line_mode,
4557 set.cursor_shape,
4558 set.selections[start_ix..end_ix].iter(),
4559 )
4560 })
4561 }
4562
4563 /// Returns if the buffer contains any diagnostics.
4564 pub fn has_diagnostics(&self) -> bool {
4565 !self.diagnostics.is_empty()
4566 }
4567
4568 /// Returns all the diagnostics intersecting the given range.
4569 pub fn diagnostics_in_range<'a, T, O>(
4570 &'a self,
4571 search_range: Range<T>,
4572 reversed: bool,
4573 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4574 where
4575 T: 'a + Clone + ToOffset,
4576 O: 'a + FromAnchor,
4577 {
4578 let mut iterators: Vec<_> = self
4579 .diagnostics
4580 .iter()
4581 .map(|(_, collection)| {
4582 collection
4583 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4584 .peekable()
4585 })
4586 .collect();
4587
4588 std::iter::from_fn(move || {
4589 let (next_ix, _) = iterators
4590 .iter_mut()
4591 .enumerate()
4592 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4593 .min_by(|(_, a), (_, b)| {
4594 let cmp = a
4595 .range
4596 .start
4597 .cmp(&b.range.start, self)
4598 // when range is equal, sort by diagnostic severity
4599 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4600 // and stabilize order with group_id
4601 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4602 if reversed { cmp.reverse() } else { cmp }
4603 })?;
4604 iterators[next_ix]
4605 .next()
4606 .map(
4607 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4608 diagnostic,
4609 range: FromAnchor::from_anchor(&range.start, self)
4610 ..FromAnchor::from_anchor(&range.end, self),
4611 },
4612 )
4613 })
4614 }
4615
4616 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4617 /// should be used instead.
4618 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4619 &self.diagnostics
4620 }
4621
4622 /// Returns all the diagnostic groups associated with the given
4623 /// language server ID. If no language server ID is provided,
4624 /// all diagnostics groups are returned.
4625 pub fn diagnostic_groups(
4626 &self,
4627 language_server_id: Option<LanguageServerId>,
4628 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4629 let mut groups = Vec::new();
4630
4631 if let Some(language_server_id) = language_server_id {
4632 if let Ok(ix) = self
4633 .diagnostics
4634 .binary_search_by_key(&language_server_id, |e| e.0)
4635 {
4636 self.diagnostics[ix]
4637 .1
4638 .groups(language_server_id, &mut groups, self);
4639 }
4640 } else {
4641 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4642 diagnostics.groups(*language_server_id, &mut groups, self);
4643 }
4644 }
4645
4646 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4647 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4648 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4649 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4650 });
4651
4652 groups
4653 }
4654
4655 /// Returns an iterator over the diagnostics for the given group.
4656 pub fn diagnostic_group<O>(
4657 &self,
4658 group_id: usize,
4659 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4660 where
4661 O: FromAnchor + 'static,
4662 {
4663 self.diagnostics
4664 .iter()
4665 .flat_map(move |(_, set)| set.group(group_id, self))
4666 }
4667
4668 /// An integer version number that accounts for all updates besides
4669 /// the buffer's text itself (which is versioned via a version vector).
4670 pub fn non_text_state_update_count(&self) -> usize {
4671 self.non_text_state_update_count
4672 }
4673
4674 /// An integer version that changes when the buffer's syntax changes.
4675 pub fn syntax_update_count(&self) -> usize {
4676 self.syntax.update_count()
4677 }
4678
4679 /// Returns a snapshot of underlying file.
4680 pub fn file(&self) -> Option<&Arc<dyn File>> {
4681 self.file.as_ref()
4682 }
4683
4684 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4685 if let Some(file) = self.file() {
4686 if file.path().file_name().is_none() || include_root {
4687 Some(file.full_path(cx).to_string_lossy().into_owned())
4688 } else {
4689 Some(file.path().display(file.path_style(cx)).to_string())
4690 }
4691 } else {
4692 None
4693 }
4694 }
4695
4696 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4697 let query_str = query.fuzzy_contents;
4698 if query_str.is_some_and(|query| query.is_empty()) {
4699 return BTreeMap::default();
4700 }
4701
4702 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4703 language,
4704 override_id: None,
4705 }));
4706
4707 let mut query_ix = 0;
4708 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4709 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4710
4711 let mut words = BTreeMap::default();
4712 let mut current_word_start_ix = None;
4713 let mut chunk_ix = query.range.start;
4714 for chunk in self.chunks(query.range, false) {
4715 for (i, c) in chunk.text.char_indices() {
4716 let ix = chunk_ix + i;
4717 if classifier.is_word(c) {
4718 if current_word_start_ix.is_none() {
4719 current_word_start_ix = Some(ix);
4720 }
4721
4722 if let Some(query_chars) = &query_chars
4723 && query_ix < query_len
4724 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4725 {
4726 query_ix += 1;
4727 }
4728 continue;
4729 } else if let Some(word_start) = current_word_start_ix.take()
4730 && query_ix == query_len
4731 {
4732 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4733 let mut word_text = self.text_for_range(word_start..ix).peekable();
4734 let first_char = word_text
4735 .peek()
4736 .and_then(|first_chunk| first_chunk.chars().next());
4737 // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
4738 if !query.skip_digits
4739 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4740 {
4741 words.insert(word_text.collect(), word_range);
4742 }
4743 }
4744 query_ix = 0;
4745 }
4746 chunk_ix += chunk.text.len();
4747 }
4748
4749 words
4750 }
4751}
4752
4753pub struct WordsQuery<'a> {
4754 /// Only returns words with all chars from the fuzzy string in them.
4755 pub fuzzy_contents: Option<&'a str>,
4756 /// Skips words that start with a digit.
4757 pub skip_digits: bool,
4758 /// Buffer offset range, to look for words.
4759 pub range: Range<usize>,
4760}
4761
4762fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4763 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4764}
4765
4766fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4767 let mut result = IndentSize::spaces(0);
4768 for c in text {
4769 let kind = match c {
4770 ' ' => IndentKind::Space,
4771 '\t' => IndentKind::Tab,
4772 _ => break,
4773 };
4774 if result.len == 0 {
4775 result.kind = kind;
4776 }
4777 result.len += 1;
4778 }
4779 result
4780}
4781
4782impl Clone for BufferSnapshot {
4783 fn clone(&self) -> Self {
4784 Self {
4785 text: self.text.clone(),
4786 syntax: self.syntax.clone(),
4787 file: self.file.clone(),
4788 remote_selections: self.remote_selections.clone(),
4789 diagnostics: self.diagnostics.clone(),
4790 language: self.language.clone(),
4791 non_text_state_update_count: self.non_text_state_update_count,
4792 }
4793 }
4794}
4795
4796impl Deref for BufferSnapshot {
4797 type Target = text::BufferSnapshot;
4798
4799 fn deref(&self) -> &Self::Target {
4800 &self.text
4801 }
4802}
4803
4804unsafe impl Send for BufferChunks<'_> {}
4805
4806impl<'a> BufferChunks<'a> {
4807 pub(crate) fn new(
4808 text: &'a Rope,
4809 range: Range<usize>,
4810 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4811 diagnostics: bool,
4812 buffer_snapshot: Option<&'a BufferSnapshot>,
4813 ) -> Self {
4814 let mut highlights = None;
4815 if let Some((captures, highlight_maps)) = syntax {
4816 highlights = Some(BufferChunkHighlights {
4817 captures,
4818 next_capture: None,
4819 stack: Default::default(),
4820 highlight_maps,
4821 })
4822 }
4823
4824 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4825 let chunks = text.chunks_in_range(range.clone());
4826
4827 let mut this = BufferChunks {
4828 range,
4829 buffer_snapshot,
4830 chunks,
4831 diagnostic_endpoints,
4832 error_depth: 0,
4833 warning_depth: 0,
4834 information_depth: 0,
4835 hint_depth: 0,
4836 unnecessary_depth: 0,
4837 underline: true,
4838 highlights,
4839 };
4840 this.initialize_diagnostic_endpoints();
4841 this
4842 }
4843
4844 /// Seeks to the given byte offset in the buffer.
4845 pub fn seek(&mut self, range: Range<usize>) {
4846 let old_range = std::mem::replace(&mut self.range, range.clone());
4847 self.chunks.set_range(self.range.clone());
4848 if let Some(highlights) = self.highlights.as_mut() {
4849 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4850 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4851 highlights
4852 .stack
4853 .retain(|(end_offset, _)| *end_offset > range.start);
4854 if let Some(capture) = &highlights.next_capture
4855 && range.start >= capture.node.start_byte()
4856 {
4857 let next_capture_end = capture.node.end_byte();
4858 if range.start < next_capture_end {
4859 highlights.stack.push((
4860 next_capture_end,
4861 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4862 ));
4863 }
4864 highlights.next_capture.take();
4865 }
4866 } else if let Some(snapshot) = self.buffer_snapshot {
4867 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4868 *highlights = BufferChunkHighlights {
4869 captures,
4870 next_capture: None,
4871 stack: Default::default(),
4872 highlight_maps,
4873 };
4874 } else {
4875 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4876 // Seeking such BufferChunks is not supported.
4877 debug_assert!(
4878 false,
4879 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4880 );
4881 }
4882
4883 highlights.captures.set_byte_range(self.range.clone());
4884 self.initialize_diagnostic_endpoints();
4885 }
4886 }
4887
4888 fn initialize_diagnostic_endpoints(&mut self) {
4889 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4890 && let Some(buffer) = self.buffer_snapshot
4891 {
4892 let mut diagnostic_endpoints = Vec::new();
4893 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4894 diagnostic_endpoints.push(DiagnosticEndpoint {
4895 offset: entry.range.start,
4896 is_start: true,
4897 severity: entry.diagnostic.severity,
4898 is_unnecessary: entry.diagnostic.is_unnecessary,
4899 underline: entry.diagnostic.underline,
4900 });
4901 diagnostic_endpoints.push(DiagnosticEndpoint {
4902 offset: entry.range.end,
4903 is_start: false,
4904 severity: entry.diagnostic.severity,
4905 is_unnecessary: entry.diagnostic.is_unnecessary,
4906 underline: entry.diagnostic.underline,
4907 });
4908 }
4909 diagnostic_endpoints
4910 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4911 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4912 self.hint_depth = 0;
4913 self.error_depth = 0;
4914 self.warning_depth = 0;
4915 self.information_depth = 0;
4916 }
4917 }
4918
4919 /// The current byte offset in the buffer.
4920 pub fn offset(&self) -> usize {
4921 self.range.start
4922 }
4923
4924 pub fn range(&self) -> Range<usize> {
4925 self.range.clone()
4926 }
4927
4928 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4929 let depth = match endpoint.severity {
4930 DiagnosticSeverity::ERROR => &mut self.error_depth,
4931 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4932 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4933 DiagnosticSeverity::HINT => &mut self.hint_depth,
4934 _ => return,
4935 };
4936 if endpoint.is_start {
4937 *depth += 1;
4938 } else {
4939 *depth -= 1;
4940 }
4941
4942 if endpoint.is_unnecessary {
4943 if endpoint.is_start {
4944 self.unnecessary_depth += 1;
4945 } else {
4946 self.unnecessary_depth -= 1;
4947 }
4948 }
4949 }
4950
4951 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4952 if self.error_depth > 0 {
4953 Some(DiagnosticSeverity::ERROR)
4954 } else if self.warning_depth > 0 {
4955 Some(DiagnosticSeverity::WARNING)
4956 } else if self.information_depth > 0 {
4957 Some(DiagnosticSeverity::INFORMATION)
4958 } else if self.hint_depth > 0 {
4959 Some(DiagnosticSeverity::HINT)
4960 } else {
4961 None
4962 }
4963 }
4964
4965 fn current_code_is_unnecessary(&self) -> bool {
4966 self.unnecessary_depth > 0
4967 }
4968}
4969
4970impl<'a> Iterator for BufferChunks<'a> {
4971 type Item = Chunk<'a>;
4972
4973 fn next(&mut self) -> Option<Self::Item> {
4974 let mut next_capture_start = usize::MAX;
4975 let mut next_diagnostic_endpoint = usize::MAX;
4976
4977 if let Some(highlights) = self.highlights.as_mut() {
4978 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4979 if *parent_capture_end <= self.range.start {
4980 highlights.stack.pop();
4981 } else {
4982 break;
4983 }
4984 }
4985
4986 if highlights.next_capture.is_none() {
4987 highlights.next_capture = highlights.captures.next();
4988 }
4989
4990 while let Some(capture) = highlights.next_capture.as_ref() {
4991 if self.range.start < capture.node.start_byte() {
4992 next_capture_start = capture.node.start_byte();
4993 break;
4994 } else {
4995 let highlight_id =
4996 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4997 highlights
4998 .stack
4999 .push((capture.node.end_byte(), highlight_id));
5000 highlights.next_capture = highlights.captures.next();
5001 }
5002 }
5003 }
5004
5005 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5006 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5007 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5008 if endpoint.offset <= self.range.start {
5009 self.update_diagnostic_depths(endpoint);
5010 diagnostic_endpoints.next();
5011 self.underline = endpoint.underline;
5012 } else {
5013 next_diagnostic_endpoint = endpoint.offset;
5014 break;
5015 }
5016 }
5017 }
5018 self.diagnostic_endpoints = diagnostic_endpoints;
5019
5020 if let Some(ChunkBitmaps {
5021 text: chunk,
5022 chars: chars_map,
5023 tabs,
5024 }) = self.chunks.peek_with_bitmaps()
5025 {
5026 let chunk_start = self.range.start;
5027 let mut chunk_end = (self.chunks.offset() + chunk.len())
5028 .min(next_capture_start)
5029 .min(next_diagnostic_endpoint);
5030 let mut highlight_id = None;
5031 if let Some(highlights) = self.highlights.as_ref()
5032 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5033 {
5034 chunk_end = chunk_end.min(*parent_capture_end);
5035 highlight_id = Some(*parent_highlight_id);
5036 }
5037 let bit_start = chunk_start - self.chunks.offset();
5038 let bit_end = chunk_end - self.chunks.offset();
5039
5040 let slice = &chunk[bit_start..bit_end];
5041
5042 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5043 let tabs = (tabs >> bit_start) & mask;
5044 let chars = (chars_map >> bit_start) & mask;
5045
5046 self.range.start = chunk_end;
5047 if self.range.start == self.chunks.offset() + chunk.len() {
5048 self.chunks.next().unwrap();
5049 }
5050
5051 Some(Chunk {
5052 text: slice,
5053 syntax_highlight_id: highlight_id,
5054 underline: self.underline,
5055 diagnostic_severity: self.current_diagnostic_severity(),
5056 is_unnecessary: self.current_code_is_unnecessary(),
5057 tabs,
5058 chars,
5059 ..Chunk::default()
5060 })
5061 } else {
5062 None
5063 }
5064 }
5065}
5066
5067impl operation_queue::Operation for Operation {
5068 fn lamport_timestamp(&self) -> clock::Lamport {
5069 match self {
5070 Operation::Buffer(_) => {
5071 unreachable!("buffer operations should never be deferred at this layer")
5072 }
5073 Operation::UpdateDiagnostics {
5074 lamport_timestamp, ..
5075 }
5076 | Operation::UpdateSelections {
5077 lamport_timestamp, ..
5078 }
5079 | Operation::UpdateCompletionTriggers {
5080 lamport_timestamp, ..
5081 }
5082 | Operation::UpdateLineEnding {
5083 lamport_timestamp, ..
5084 } => *lamport_timestamp,
5085 }
5086 }
5087}
5088
5089impl Default for Diagnostic {
5090 fn default() -> Self {
5091 Self {
5092 source: Default::default(),
5093 source_kind: DiagnosticSourceKind::Other,
5094 code: None,
5095 code_description: None,
5096 severity: DiagnosticSeverity::ERROR,
5097 message: Default::default(),
5098 markdown: None,
5099 group_id: 0,
5100 is_primary: false,
5101 is_disk_based: false,
5102 is_unnecessary: false,
5103 underline: true,
5104 data: None,
5105 }
5106 }
5107}
5108
5109impl IndentSize {
5110 /// Returns an [`IndentSize`] representing the given spaces.
5111 pub fn spaces(len: u32) -> Self {
5112 Self {
5113 len,
5114 kind: IndentKind::Space,
5115 }
5116 }
5117
5118 /// Returns an [`IndentSize`] representing a tab.
5119 pub fn tab() -> Self {
5120 Self {
5121 len: 1,
5122 kind: IndentKind::Tab,
5123 }
5124 }
5125
5126 /// An iterator over the characters represented by this [`IndentSize`].
5127 pub fn chars(&self) -> impl Iterator<Item = char> {
5128 iter::repeat(self.char()).take(self.len as usize)
5129 }
5130
5131 /// The character representation of this [`IndentSize`].
5132 pub fn char(&self) -> char {
5133 match self.kind {
5134 IndentKind::Space => ' ',
5135 IndentKind::Tab => '\t',
5136 }
5137 }
5138
5139 /// Consumes the current [`IndentSize`] and returns a new one that has
5140 /// been shrunk or enlarged by the given size along the given direction.
5141 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5142 match direction {
5143 Ordering::Less => {
5144 if self.kind == size.kind && self.len >= size.len {
5145 self.len -= size.len;
5146 }
5147 }
5148 Ordering::Equal => {}
5149 Ordering::Greater => {
5150 if self.len == 0 {
5151 self = size;
5152 } else if self.kind == size.kind {
5153 self.len += size.len;
5154 }
5155 }
5156 }
5157 self
5158 }
5159
5160 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5161 match self.kind {
5162 IndentKind::Space => self.len as usize,
5163 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5164 }
5165 }
5166}
5167
5168#[cfg(any(test, feature = "test-support"))]
5169pub struct TestFile {
5170 pub path: Arc<RelPath>,
5171 pub root_name: String,
5172 pub local_root: Option<PathBuf>,
5173}
5174
5175#[cfg(any(test, feature = "test-support"))]
5176impl File for TestFile {
5177 fn path(&self) -> &Arc<RelPath> {
5178 &self.path
5179 }
5180
5181 fn full_path(&self, _: &gpui::App) -> PathBuf {
5182 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5183 }
5184
5185 fn as_local(&self) -> Option<&dyn LocalFile> {
5186 if self.local_root.is_some() {
5187 Some(self)
5188 } else {
5189 None
5190 }
5191 }
5192
5193 fn disk_state(&self) -> DiskState {
5194 unimplemented!()
5195 }
5196
5197 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5198 self.path().file_name().unwrap_or(self.root_name.as_ref())
5199 }
5200
5201 fn worktree_id(&self, _: &App) -> WorktreeId {
5202 WorktreeId::from_usize(0)
5203 }
5204
5205 fn to_proto(&self, _: &App) -> rpc::proto::File {
5206 unimplemented!()
5207 }
5208
5209 fn is_private(&self) -> bool {
5210 false
5211 }
5212
5213 fn path_style(&self, _cx: &App) -> PathStyle {
5214 PathStyle::local()
5215 }
5216}
5217
5218#[cfg(any(test, feature = "test-support"))]
5219impl LocalFile for TestFile {
5220 fn abs_path(&self, _cx: &App) -> PathBuf {
5221 PathBuf::from(self.local_root.as_ref().unwrap())
5222 .join(&self.root_name)
5223 .join(self.path.as_std_path())
5224 }
5225
5226 fn load(&self, _cx: &App) -> Task<Result<String>> {
5227 unimplemented!()
5228 }
5229
5230 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5231 unimplemented!()
5232 }
5233}
5234
5235pub(crate) fn contiguous_ranges(
5236 values: impl Iterator<Item = u32>,
5237 max_len: usize,
5238) -> impl Iterator<Item = Range<u32>> {
5239 let mut values = values;
5240 let mut current_range: Option<Range<u32>> = None;
5241 std::iter::from_fn(move || {
5242 loop {
5243 if let Some(value) = values.next() {
5244 if let Some(range) = &mut current_range
5245 && value == range.end
5246 && range.len() < max_len
5247 {
5248 range.end += 1;
5249 continue;
5250 }
5251
5252 let prev_range = current_range.clone();
5253 current_range = Some(value..(value + 1));
5254 if prev_range.is_some() {
5255 return prev_range;
5256 }
5257 } else {
5258 return current_range.take();
5259 }
5260 }
5261 })
5262}
5263
5264#[derive(Default, Debug)]
5265pub struct CharClassifier {
5266 scope: Option<LanguageScope>,
5267 scope_context: Option<CharScopeContext>,
5268 ignore_punctuation: bool,
5269}
5270
5271impl CharClassifier {
5272 pub fn new(scope: Option<LanguageScope>) -> Self {
5273 Self {
5274 scope,
5275 scope_context: None,
5276 ignore_punctuation: false,
5277 }
5278 }
5279
5280 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5281 Self {
5282 scope_context,
5283 ..self
5284 }
5285 }
5286
5287 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5288 Self {
5289 ignore_punctuation,
5290 ..self
5291 }
5292 }
5293
5294 pub fn is_whitespace(&self, c: char) -> bool {
5295 self.kind(c) == CharKind::Whitespace
5296 }
5297
5298 pub fn is_word(&self, c: char) -> bool {
5299 self.kind(c) == CharKind::Word
5300 }
5301
5302 pub fn is_punctuation(&self, c: char) -> bool {
5303 self.kind(c) == CharKind::Punctuation
5304 }
5305
5306 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5307 if c.is_alphanumeric() || c == '_' {
5308 return CharKind::Word;
5309 }
5310
5311 if let Some(scope) = &self.scope {
5312 let characters = match self.scope_context {
5313 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5314 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5315 None => scope.word_characters(),
5316 };
5317 if let Some(characters) = characters
5318 && characters.contains(&c)
5319 {
5320 return CharKind::Word;
5321 }
5322 }
5323
5324 if c.is_whitespace() {
5325 return CharKind::Whitespace;
5326 }
5327
5328 if ignore_punctuation {
5329 CharKind::Word
5330 } else {
5331 CharKind::Punctuation
5332 }
5333 }
5334
5335 pub fn kind(&self, c: char) -> CharKind {
5336 self.kind_with(c, self.ignore_punctuation)
5337 }
5338}
5339
5340/// Find all of the ranges of whitespace that occur at the ends of lines
5341/// in the given rope.
5342///
5343/// This could also be done with a regex search, but this implementation
5344/// avoids copying text.
5345pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5346 let mut ranges = Vec::new();
5347
5348 let mut offset = 0;
5349 let mut prev_chunk_trailing_whitespace_range = 0..0;
5350 for chunk in rope.chunks() {
5351 let mut prev_line_trailing_whitespace_range = 0..0;
5352 for (i, line) in chunk.split('\n').enumerate() {
5353 let line_end_offset = offset + line.len();
5354 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5355 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5356
5357 if i == 0 && trimmed_line_len == 0 {
5358 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5359 }
5360 if !prev_line_trailing_whitespace_range.is_empty() {
5361 ranges.push(prev_line_trailing_whitespace_range);
5362 }
5363
5364 offset = line_end_offset + 1;
5365 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5366 }
5367
5368 offset -= 1;
5369 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5370 }
5371
5372 if !prev_chunk_trailing_whitespace_range.is_empty() {
5373 ranges.push(prev_chunk_trailing_whitespace_range);
5374 }
5375
5376 ranges
5377}