1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::{MTime, encodings::EncodingWrapper};
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
    /// The character encoding of this buffer's file on disk.
    pub encoding: Arc<std::sync::Mutex<&'static Encoding>>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
    /// An optional URI with more information about this diagnostic's `code`.
    pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
    /// The human-readable message in Markdown format, if available.
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source kind.
244 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
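///
/// A minimal sketch of applying an operation received from a collaborator to a
/// local replica (assumes a `remote_buffer: Entity<Buffer>`, an already
/// deserialized `operation: Operation`, and an `&mut App` in scope as `cx`):
///
/// ```ignore
/// remote_buffer.update(cx, |buffer, cx| {
///     buffer.apply_ops([operation], cx);
/// });
/// ```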
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
    /// The buffer needs to be reloaded.
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
348 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
349 /// only available in some states, such as modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
    /// Returns whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376}
377
378/// The file's storage status - whether it's stored (`Present`), and if so when it was last
379/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
380/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
381/// indicator for new files.
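///
/// A minimal sketch of how the states differ:
///
/// ```ignore
/// let state = DiskState::New;
/// assert!(!state.exists());
/// assert!(state.mtime().is_none());
/// ```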
382#[derive(Copy, Clone, Debug, PartialEq)]
383pub enum DiskState {
384 /// File created in Zed that has not been saved.
385 New,
386 /// File present on the filesystem.
387 Present { mtime: MTime },
388 /// Deleted file that was previously present.
389 Deleted,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
410
411/// The file associated with a buffer, in the case where the file is on the local disk.
412pub trait LocalFile: File {
413 /// Returns the absolute path of this file
414 fn abs_path(&self, cx: &App) -> PathBuf;
415
416 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
417 fn load(
418 &self,
419 cx: &App,
420 encoding: EncodingWrapper,
421 force: bool,
422 detect_utf16: bool,
423 buffer_encoding: Option<Arc<std::sync::Mutex<&'static Encoding>>>,
424 ) -> Task<Result<String>>;
425
426 /// Loads the file's contents from disk.
427 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
428}
429
430/// The auto-indent behavior associated with an editing operation.
431/// For some editing operations, each affected line of text has its
432/// indentation recomputed. For other operations, the entire block
433/// of edited text is adjusted uniformly.
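///
/// A minimal sketch: block mode for pasting a snippet whose first line was
/// originally at indent column 4. If that line is auto-indented to column 8,
/// the remaining lines are shifted right by 4 columns as well.
///
/// ```ignore
/// let mode = AutoindentMode::Block {
///     original_indent_columns: vec![Some(4)],
/// };
/// ```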
434#[derive(Clone, Debug)]
435pub enum AutoindentMode {
436 /// Indent each line of inserted text.
437 EachLine,
438 /// Apply the same indentation adjustment to all of the lines
439 /// in a given insertion.
440 Block {
441 /// The original indentation column of the first line of each
442 /// insertion, if it has been copied.
443 ///
444 /// Knowing this makes it possible to preserve the relative indentation
445 /// of every line in the insertion from when it was copied.
446 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns.
450 original_indent_columns: Vec<Option<u32>>,
451 },
452}
453
454#[derive(Clone)]
455struct AutoindentRequest {
456 before_edit: BufferSnapshot,
457 entries: Vec<AutoindentRequestEntry>,
458 is_block_mode: bool,
459 ignore_empty_lines: bool,
460}
461
462#[derive(Debug, Clone)]
463struct AutoindentRequestEntry {
464 /// A range of the buffer whose indentation should be adjusted.
465 range: Range<Anchor>,
466 /// Whether or not these lines should be considered brand new, for the
467 /// purpose of auto-indent. When text is not new, its indentation will
468 /// only be adjusted if the suggested indentation level has *changed*
469 /// since the edit was made.
470 first_line_is_new: bool,
471 indent_size: IndentSize,
472 original_indent_column: Option<u32>,
473}
474
475#[derive(Debug)]
476struct IndentSuggestion {
477 basis_row: u32,
478 delta: Ordering,
479 within_error: bool,
480}
481
482struct BufferChunkHighlights<'a> {
483 captures: SyntaxMapCaptures<'a>,
484 next_capture: Option<SyntaxMapCapture<'a>>,
485 stack: Vec<(usize, HighlightId)>,
486 highlight_maps: Vec<HighlightMap>,
487}
488
489/// An iterator that yields chunks of a buffer's text, along with their
490/// syntax highlights and diagnostic status.
491pub struct BufferChunks<'a> {
492 buffer_snapshot: Option<&'a BufferSnapshot>,
493 range: Range<usize>,
494 chunks: text::Chunks<'a>,
495 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
496 error_depth: usize,
497 warning_depth: usize,
498 information_depth: usize,
499 hint_depth: usize,
500 unnecessary_depth: usize,
501 underline: bool,
502 highlights: Option<BufferChunkHighlights<'a>>,
503}
504
505/// A chunk of a buffer's text, along with its syntax highlight and
506/// diagnostic status.
507#[derive(Clone, Debug, Default)]
508pub struct Chunk<'a> {
509 /// The text of the chunk.
510 pub text: &'a str,
511 /// The syntax highlighting style of the chunk.
512 pub syntax_highlight_id: Option<HighlightId>,
513 /// The highlight style that has been applied to this chunk in
514 /// the editor.
515 pub highlight_style: Option<HighlightStyle>,
516 /// The severity of diagnostic associated with this chunk, if any.
517 pub diagnostic_severity: Option<DiagnosticSeverity>,
518 /// A bitset of which characters are tabs in this string.
519 pub tabs: u128,
520 /// Bitmap of character indices in this chunk
521 pub chars: u128,
522 /// Whether this chunk of text is marked as unnecessary.
523 pub is_unnecessary: bool,
524 /// Whether this chunk of text was originally a tab character.
525 pub is_tab: bool,
526 /// Whether this chunk of text was originally an inlay.
527 pub is_inlay: bool,
528 /// Whether to underline the corresponding text range in the editor.
529 pub underline: bool,
530}
531
532/// A set of edits to a given version of a buffer, computed asynchronously.
533#[derive(Debug)]
534pub struct Diff {
535 pub base_version: clock::Global,
536 pub line_ending: LineEnding,
537 pub edits: Vec<(Range<usize>, Arc<str>)>,
538}
539
540#[derive(Debug, Clone, Copy)]
541pub(crate) struct DiagnosticEndpoint {
542 offset: usize,
543 is_start: bool,
544 underline: bool,
545 severity: DiagnosticSeverity,
546 is_unnecessary: bool,
547}
548
549/// A class of characters, used for characterizing a run of text.
550#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
551pub enum CharKind {
552 /// Whitespace.
553 Whitespace,
554 /// Punctuation.
555 Punctuation,
556 /// Word.
557 Word,
558}
559
560/// Context for character classification within a specific scope.
561#[derive(Copy, Clone, Eq, PartialEq, Debug)]
562pub enum CharScopeContext {
563 /// Character classification for completion queries.
564 ///
565 /// This context treats certain characters as word constituents that would
566 /// normally be considered punctuation, such as '-' in Tailwind classes
567 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
568 Completion,
569 /// Character classification for linked edits.
570 ///
571 /// This context handles characters that should be treated as part of
572 /// identifiers during linked editing operations, such as '.' in JSX
573 /// component names like `<Animated.View>`.
574 LinkedEdit,
575}
576
/// A runnable is a set of data about a buffer region that can be resolved into a task.
578pub struct Runnable {
579 pub tags: SmallVec<[RunnableTag; 1]>,
580 pub language: Arc<Language>,
581 pub buffer: BufferId,
582}
583
584#[derive(Default, Clone, Debug)]
585pub struct HighlightedText {
586 pub text: SharedString,
587 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
588}
589
590#[derive(Default, Debug)]
591struct HighlightedTextBuilder {
592 pub text: String,
593 highlights: Vec<(Range<usize>, HighlightStyle)>,
594}
595
596impl HighlightedText {
597 pub fn from_buffer_range<T: ToOffset>(
598 range: Range<T>,
599 snapshot: &text::BufferSnapshot,
600 syntax_snapshot: &SyntaxSnapshot,
601 override_style: Option<HighlightStyle>,
602 syntax_theme: &SyntaxTheme,
603 ) -> Self {
604 let mut highlighted_text = HighlightedTextBuilder::default();
605 highlighted_text.add_text_from_buffer_range(
606 range,
607 snapshot,
608 syntax_snapshot,
609 override_style,
610 syntax_theme,
611 );
612 highlighted_text.build()
613 }
614
615 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
616 gpui::StyledText::new(self.text.clone())
617 .with_default_highlights(default_style, self.highlights.iter().cloned())
618 }
619
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
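    ///
    /// A minimal sketch (assumes the default `HighlightStyle`):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "  let x = 1;\nlet y = 2;".into(),
    ///     highlights: vec![(2..5, HighlightStyle::default())],
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;");
    /// assert!(has_more);
    /// ```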
622 pub fn first_line_preview(self) -> (Self, bool) {
623 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
624 let first_line = &self.text[..newline_ix];
625
626 // Trim leading whitespace, unless an edit starts prior to it.
627 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
628 if let Some((first_highlight_range, _)) = self.highlights.first() {
629 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
630 }
631
632 let preview_text = &first_line[preview_start_ix..];
633 let preview_highlights = self
634 .highlights
635 .into_iter()
636 .skip_while(|(range, _)| range.end <= preview_start_ix)
637 .take_while(|(range, _)| range.start < newline_ix)
638 .filter_map(|(mut range, highlight)| {
639 range.start = range.start.saturating_sub(preview_start_ix);
640 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
641 if range.is_empty() {
642 None
643 } else {
644 Some((range, highlight))
645 }
646 });
647
648 let preview = Self {
649 text: SharedString::new(preview_text),
650 highlights: preview_highlights.collect(),
651 };
652
653 (preview, self.text.len() > newline_ix)
654 }
655}
656
657impl HighlightedTextBuilder {
658 pub fn build(self) -> HighlightedText {
659 HighlightedText {
660 text: self.text.into(),
661 highlights: self.highlights,
662 }
663 }
664
665 pub fn add_text_from_buffer_range<T: ToOffset>(
666 &mut self,
667 range: Range<T>,
668 snapshot: &text::BufferSnapshot,
669 syntax_snapshot: &SyntaxSnapshot,
670 override_style: Option<HighlightStyle>,
671 syntax_theme: &SyntaxTheme,
672 ) {
673 let range = range.to_offset(snapshot);
674 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
675 let start = self.text.len();
676 self.text.push_str(chunk.text);
677 let end = self.text.len();
678
679 if let Some(highlight_style) = chunk
680 .syntax_highlight_id
681 .and_then(|id| id.style(syntax_theme))
682 {
683 let highlight_style = override_style.map_or(highlight_style, |override_style| {
684 highlight_style.highlight(override_style)
685 });
686 self.highlights.push((start..end, highlight_style));
687 } else if let Some(override_style) = override_style {
688 self.highlights.push((start..end, override_style));
689 }
690 }
691 }
692
693 fn highlighted_chunks<'a>(
694 range: Range<usize>,
695 snapshot: &'a text::BufferSnapshot,
696 syntax_snapshot: &'a SyntaxSnapshot,
697 ) -> BufferChunks<'a> {
698 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
699 grammar
700 .highlights_config
701 .as_ref()
702 .map(|config| &config.query)
703 });
704
705 let highlight_maps = captures
706 .grammars()
707 .iter()
708 .map(|grammar| grammar.highlight_map())
709 .collect();
710
711 BufferChunks::new(
712 snapshot.as_rope(),
713 range,
714 Some((captures, highlight_maps)),
715 false,
716 None,
717 )
718 }
719}
720
721#[derive(Clone)]
722pub struct EditPreview {
723 old_snapshot: text::BufferSnapshot,
724 applied_edits_snapshot: text::BufferSnapshot,
725 syntax_snapshot: SyntaxSnapshot,
726}
727
728impl EditPreview {
729 pub fn highlight_edits(
730 &self,
731 current_snapshot: &BufferSnapshot,
732 edits: &[(Range<Anchor>, String)],
733 include_deletions: bool,
734 cx: &App,
735 ) -> HighlightedText {
736 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
737 return HighlightedText::default();
738 };
739
740 let mut highlighted_text = HighlightedTextBuilder::default();
741
742 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
743
744 let insertion_highlight_style = HighlightStyle {
745 background_color: Some(cx.theme().status().created_background),
746 ..Default::default()
747 };
748 let deletion_highlight_style = HighlightStyle {
749 background_color: Some(cx.theme().status().deleted_background),
750 ..Default::default()
751 };
752 let syntax_theme = cx.theme().syntax();
753
754 for (range, edit_text) in edits {
755 let edit_new_end_in_preview_snapshot = range
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_offset(&self.applied_edits_snapshot);
759 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
760
761 let unchanged_range_in_preview_snapshot =
762 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
763 if !unchanged_range_in_preview_snapshot.is_empty() {
764 highlighted_text.add_text_from_buffer_range(
765 unchanged_range_in_preview_snapshot,
766 &self.applied_edits_snapshot,
767 &self.syntax_snapshot,
768 None,
769 syntax_theme,
770 );
771 }
772
773 let range_in_current_snapshot = range.to_offset(current_snapshot);
774 if include_deletions && !range_in_current_snapshot.is_empty() {
775 highlighted_text.add_text_from_buffer_range(
776 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
779 Some(deletion_highlight_style),
780 syntax_theme,
781 );
782 }
783
784 if !edit_text.is_empty() {
785 highlighted_text.add_text_from_buffer_range(
786 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
787 &self.applied_edits_snapshot,
788 &self.syntax_snapshot,
789 Some(insertion_highlight_style),
790 syntax_theme,
791 );
792 }
793
794 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
795 }
796
797 highlighted_text.add_text_from_buffer_range(
798 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
799 &self.applied_edits_snapshot,
800 &self.syntax_snapshot,
801 None,
802 syntax_theme,
803 );
804
805 highlighted_text.build()
806 }
807
808 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
809 let (first, _) = edits.first()?;
810 let (last, _) = edits.last()?;
811
812 let start = first
813 .start
814 .bias_left(&self.old_snapshot)
815 .to_point(&self.applied_edits_snapshot);
816 let end = last
817 .end
818 .bias_right(&self.old_snapshot)
819 .to_point(&self.applied_edits_snapshot);
820
821 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
822 let range = Point::new(start.row, 0)
823 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
824
825 Some(range.to_offset(&self.applied_edits_snapshot))
826 }
827}
828
829#[derive(Clone, Debug, PartialEq, Eq)]
830pub struct BracketMatch {
831 pub open_range: Range<usize>,
832 pub close_range: Range<usize>,
833 pub newline_only: bool,
834}
835
836impl Buffer {
837 /// Create a new buffer with the given base text.
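    ///
    /// A minimal sketch, assuming an `&mut App` in scope as `cx`:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```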
838 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
839 Self::build(
840 TextBuffer::new(
841 ReplicaId::LOCAL,
842 cx.entity_id().as_non_zero_u64().into(),
843 base_text.into(),
844 &cx.background_executor(),
845 ),
846 None,
847 Capability::ReadWrite,
848 )
849 }
850
    /// Replaces the text buffer. In contrast to `set_text`, this does not
    /// change the buffer's editing state.
853 pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
854 self.text = new;
855 self.saved_version = self.version.clone();
856 self.has_unsaved_edits.set((self.version.clone(), false));
857
858 self.was_changed();
859 cx.emit(BufferEvent::DirtyChanged);
860 cx.notify();
861 }
862
863 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
864 pub fn local_normalized(
865 base_text_normalized: Rope,
866 line_ending: LineEnding,
867 cx: &Context<Self>,
868 ) -> Self {
869 Self::build(
870 TextBuffer::new_normalized(
871 ReplicaId::LOCAL,
872 cx.entity_id().as_non_zero_u64().into(),
873 line_ending,
874 base_text_normalized,
875 ),
876 None,
877 Capability::ReadWrite,
878 )
879 }
880
881 /// Create a new buffer that is a replica of a remote buffer.
882 pub fn remote(
883 remote_id: BufferId,
884 replica_id: ReplicaId,
885 capability: Capability,
886 base_text: impl Into<String>,
887 cx: &BackgroundExecutor,
888 ) -> Self {
889 Self::build(
890 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
891 None,
892 capability,
893 )
894 }
895
896 /// Create a new buffer that is a replica of a remote buffer, populating its
897 /// state from the given protobuf message.
898 pub fn from_proto(
899 replica_id: ReplicaId,
900 capability: Capability,
901 message: proto::BufferState,
902 file: Option<Arc<dyn File>>,
903 cx: &BackgroundExecutor,
904 ) -> Result<Self> {
905 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
906 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
907 let mut this = Self::build(buffer, file, capability);
908 this.text.set_line_ending(proto::deserialize_line_ending(
909 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
910 ));
911 this.saved_version = proto::deserialize_version(&message.saved_version);
912 this.saved_mtime = message.saved_mtime.map(|time| time.into());
913 Ok(this)
914 }
915
916 /// Serialize the buffer's state to a protobuf message.
917 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
918 proto::BufferState {
919 id: self.remote_id().into(),
920 file: self.file.as_ref().map(|f| f.to_proto(cx)),
921 base_text: self.base_text().to_string(),
922 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
923 saved_version: proto::serialize_version(&self.saved_version),
924 saved_mtime: self.saved_mtime.map(|time| time.into()),
925 }
926 }
927
928 /// Serialize as protobufs all of the changes to the buffer since the given version.
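    ///
    /// A minimal sketch of sharing a buffer with a remote peer (assumes a
    /// `buffer: Entity<Buffer>` and an `&App` in scope as `cx`):
    ///
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let ops_task = buffer.read(cx).serialize_ops(None, cx);
    /// // Send `state` first, then the resolved operations, to the remote replica.
    /// ```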
929 pub fn serialize_ops(
930 &self,
931 since: Option<clock::Global>,
932 cx: &App,
933 ) -> Task<Vec<proto::Operation>> {
934 let mut operations = Vec::new();
935 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
936
937 operations.extend(self.remote_selections.iter().map(|(_, set)| {
938 proto::serialize_operation(&Operation::UpdateSelections {
939 selections: set.selections.clone(),
940 lamport_timestamp: set.lamport_timestamp,
941 line_mode: set.line_mode,
942 cursor_shape: set.cursor_shape,
943 })
944 }));
945
946 for (server_id, diagnostics) in &self.diagnostics {
947 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
948 lamport_timestamp: self.diagnostics_timestamp,
949 server_id: *server_id,
950 diagnostics: diagnostics.iter().cloned().collect(),
951 }));
952 }
953
954 for (server_id, completions) in &self.completion_triggers_per_language_server {
955 operations.push(proto::serialize_operation(
956 &Operation::UpdateCompletionTriggers {
957 triggers: completions.iter().cloned().collect(),
958 lamport_timestamp: self.completion_triggers_timestamp,
959 server_id: *server_id,
960 },
961 ));
962 }
963
964 let text_operations = self.text.operations().clone();
965 cx.background_spawn(async move {
966 let since = since.unwrap_or_default();
967 operations.extend(
968 text_operations
969 .iter()
970 .filter(|(_, op)| !since.observed(op.timestamp()))
971 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
972 );
973 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
974 operations
975 })
976 }
977
978 /// Assign a language to the buffer, returning the buffer.
979 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
980 self.set_language(Some(language), cx);
981 self
982 }
983
984 /// Returns the [`Capability`] of this buffer.
985 pub fn capability(&self) -> Capability {
986 self.capability
987 }
988
989 /// Whether this buffer can only be read.
990 pub fn read_only(&self) -> bool {
991 self.capability == Capability::ReadOnly
992 }
993
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
995 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
996 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
997 let snapshot = buffer.snapshot();
998 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
999 Self {
1000 saved_mtime,
1001 saved_version: buffer.version(),
1002 preview_version: buffer.version(),
1003 reload_task: None,
1004 transaction_depth: 0,
1005 was_dirty_before_starting_transaction: None,
1006 has_unsaved_edits: Cell::new((buffer.version(), false)),
1007 text: buffer,
1008 branch_state: None,
1009 file,
1010 capability,
1011 syntax_map,
1012 reparse: None,
1013 non_text_state_update_count: 0,
1014 sync_parse_timeout: Duration::from_millis(1),
1015 parse_status: watch::channel(ParseStatus::Idle),
1016 autoindent_requests: Default::default(),
1017 wait_for_autoindent_txs: Default::default(),
1018 pending_autoindent: Default::default(),
1019 language: None,
1020 remote_selections: Default::default(),
1021 diagnostics: Default::default(),
1022 diagnostics_timestamp: Lamport::MIN,
1023 completion_triggers: Default::default(),
1024 completion_triggers_per_language_server: Default::default(),
1025 completion_triggers_timestamp: Lamport::MIN,
1026 deferred_ops: OperationQueue::new(),
1027 has_conflict: false,
1028 change_bits: Default::default(),
1029 _subscriptions: Vec::new(),
1030 encoding: Arc::new(std::sync::Mutex::new(encoding_rs::UTF_8)),
1031 }
1032 }
1033
1034 pub fn build_snapshot(
1035 text: Rope,
1036 language: Option<Arc<Language>>,
1037 language_registry: Option<Arc<LanguageRegistry>>,
1038 cx: &mut App,
1039 ) -> impl Future<Output = BufferSnapshot> + use<> {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 async move {
1043 let text =
1044 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1045 .snapshot();
1046 let mut syntax = SyntaxMap::new(&text).snapshot();
1047 if let Some(language) = language.clone() {
1048 let language_registry = language_registry.clone();
1049 syntax.reparse(&text, language_registry, language);
1050 }
1051 BufferSnapshot {
1052 text,
1053 syntax,
1054 file: None,
1055 diagnostics: Default::default(),
1056 remote_selections: Default::default(),
1057 language,
1058 non_text_state_update_count: 0,
1059 }
1060 }
1061 }
1062
1063 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1064 let entity_id = cx.reserve_entity::<Self>().entity_id();
1065 let buffer_id = entity_id.as_non_zero_u64().into();
1066 let text = TextBuffer::new_normalized(
1067 ReplicaId::LOCAL,
1068 buffer_id,
1069 Default::default(),
1070 Rope::new(),
1071 )
1072 .snapshot();
1073 let syntax = SyntaxMap::new(&text).snapshot();
1074 BufferSnapshot {
1075 text,
1076 syntax,
1077 file: None,
1078 diagnostics: Default::default(),
1079 remote_selections: Default::default(),
1080 language: None,
1081 non_text_state_update_count: 0,
1082 }
1083 }
1084
1085 #[cfg(any(test, feature = "test-support"))]
1086 pub fn build_snapshot_sync(
1087 text: Rope,
1088 language: Option<Arc<Language>>,
1089 language_registry: Option<Arc<LanguageRegistry>>,
1090 cx: &mut App,
1091 ) -> BufferSnapshot {
1092 let entity_id = cx.reserve_entity::<Self>().entity_id();
1093 let buffer_id = entity_id.as_non_zero_u64().into();
1094 let text =
1095 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1096 .snapshot();
1097 let mut syntax = SyntaxMap::new(&text).snapshot();
1098 if let Some(language) = language.clone() {
1099 syntax.reparse(&text, language_registry, language);
1100 }
1101 BufferSnapshot {
1102 text,
1103 syntax,
1104 file: None,
1105 diagnostics: Default::default(),
1106 remote_selections: Default::default(),
1107 language,
1108 non_text_state_update_count: 0,
1109 }
1110 }
1111
1112 /// Retrieve a snapshot of the buffer's current state. This is computationally
1113 /// cheap, and allows reading from the buffer on a background thread.
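    ///
    /// A minimal sketch of reading from a snapshot on a background thread
    /// (assumes a `buffer: Entity<Buffer>` and an `&App` in scope as `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let _contents = snapshot.text();
    /// })
    /// .detach();
    /// ```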
1114 pub fn snapshot(&self) -> BufferSnapshot {
1115 let text = self.text.snapshot();
1116 let mut syntax_map = self.syntax_map.lock();
1117 syntax_map.interpolate(&text);
1118 let syntax = syntax_map.snapshot();
1119
1120 BufferSnapshot {
1121 text,
1122 syntax,
1123 file: self.file.clone(),
1124 remote_selections: self.remote_selections.clone(),
1125 diagnostics: self.diagnostics.clone(),
1126 language: self.language.clone(),
1127 non_text_state_update_count: self.non_text_state_update_count,
1128 }
1129 }
1130
1131 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1132 let this = cx.entity();
1133 cx.new(|cx| {
1134 let mut branch = Self {
1135 branch_state: Some(BufferBranchState {
1136 base_buffer: this.clone(),
1137 merged_operations: Default::default(),
1138 }),
1139 language: self.language.clone(),
1140 has_conflict: self.has_conflict,
1141 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1142 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1143 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1144 };
1145 if let Some(language_registry) = self.language_registry() {
1146 branch.set_language_registry(language_registry);
1147 }
1148
1149 // Reparse the branch buffer so that we get syntax highlighting immediately.
1150 branch.reparse(cx);
1151
1152 branch
1153 })
1154 }
1155
1156 pub fn preview_edits(
1157 &self,
1158 edits: Arc<[(Range<Anchor>, String)]>,
1159 cx: &App,
1160 ) -> Task<EditPreview> {
1161 let registry = self.language_registry();
1162 let language = self.language().cloned();
1163 let old_snapshot = self.text.snapshot();
1164 let mut branch_buffer = self.text.branch();
1165 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1166 let executor = cx.background_executor().clone();
1167 cx.background_spawn(async move {
1168 if !edits.is_empty() {
1169 if let Some(language) = language.clone() {
1170 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1171 }
1172
1173 branch_buffer.edit(edits.iter().cloned(), &executor);
1174 let snapshot = branch_buffer.snapshot();
1175 syntax_snapshot.interpolate(&snapshot);
1176
1177 if let Some(language) = language {
1178 syntax_snapshot.reparse(&snapshot, registry, language);
1179 }
1180 }
1181 EditPreview {
1182 old_snapshot,
1183 applied_edits_snapshot: branch_buffer.snapshot(),
1184 syntax_snapshot,
1185 }
1186 })
1187 }
1188
1189 /// Applies all of the changes in this buffer that intersect any of the
1190 /// given `ranges` to its base buffer.
1191 ///
1192 /// If `ranges` is empty, then all changes will be applied. This buffer must
1193 /// be a branch buffer to call this method.
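    ///
    /// A minimal sketch (assumes a `buffer: Entity<Buffer>` and an `&mut App`
    /// in scope as `cx`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prepended\n")], None, cx);
    ///     // An empty range list merges every change back into `buffer`.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```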
1194 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1195 let Some(base_buffer) = self.base_buffer() else {
1196 debug_panic!("not a branch buffer");
1197 return;
1198 };
1199
1200 let mut ranges = if ranges.is_empty() {
1201 &[0..usize::MAX]
1202 } else {
1203 ranges.as_slice()
1204 }
1205 .iter()
1206 .peekable();
1207
1208 let mut edits = Vec::new();
1209 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1210 let mut is_included = false;
1211 while let Some(range) = ranges.peek() {
1212 if range.end < edit.new.start {
1213 ranges.next().unwrap();
1214 } else {
1215 if range.start <= edit.new.end {
1216 is_included = true;
1217 }
1218 break;
1219 }
1220 }
1221
1222 if is_included {
1223 edits.push((
1224 edit.old.clone(),
1225 self.text_for_range(edit.new.clone()).collect::<String>(),
1226 ));
1227 }
1228 }
1229
1230 let operation = base_buffer.update(cx, |base_buffer, cx| {
1231 // cx.emit(BufferEvent::DiffBaseChanged);
1232 base_buffer.edit(edits, None, cx)
1233 });
1234
1235 if let Some(operation) = operation
1236 && let Some(BufferBranchState {
1237 merged_operations, ..
1238 }) = &mut self.branch_state
1239 {
1240 merged_operations.push(operation);
1241 }
1242 }
1243
1244 fn on_base_buffer_event(
1245 &mut self,
1246 _: Entity<Buffer>,
1247 event: &BufferEvent,
1248 cx: &mut Context<Self>,
1249 ) {
1250 let BufferEvent::Operation { operation, .. } = event else {
1251 return;
1252 };
1253 let Some(BufferBranchState {
1254 merged_operations, ..
1255 }) = &mut self.branch_state
1256 else {
1257 return;
1258 };
1259
1260 let mut operation_to_undo = None;
1261 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1262 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1263 {
1264 merged_operations.remove(ix);
1265 operation_to_undo = Some(operation.timestamp);
1266 }
1267
1268 self.apply_ops([operation.clone()], cx);
1269
1270 if let Some(timestamp) = operation_to_undo {
1271 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1272 self.undo_operations(counts, cx);
1273 }
1274 }
1275
1276 #[cfg(test)]
1277 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1278 &self.text
1279 }
1280
1281 /// Retrieve a snapshot of the buffer's raw text, without any
1282 /// language-related state like the syntax tree or diagnostics.
1283 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1284 self.text.snapshot()
1285 }
1286
1287 /// The file associated with the buffer, if any.
1288 pub fn file(&self) -> Option<&Arc<dyn File>> {
1289 self.file.as_ref()
1290 }
1291
1292 /// The version of the buffer that was last saved or reloaded from disk.
1293 pub fn saved_version(&self) -> &clock::Global {
1294 &self.saved_version
1295 }
1296
1297 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1298 pub fn saved_mtime(&self) -> Option<MTime> {
1299 self.saved_mtime
1300 }
1301
1302 /// Assign a language to the buffer.
1303 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1304 self.non_text_state_update_count += 1;
1305 self.syntax_map.lock().clear(&self.text);
1306 self.language = language;
1307 self.was_changed();
1308 self.reparse(cx);
1309 cx.emit(BufferEvent::LanguageChanged);
1310 }
1311
1312 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1313 /// other languages if parts of the buffer are written in different languages.
1314 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1315 self.syntax_map
1316 .lock()
1317 .set_language_registry(language_registry);
1318 }
1319
1320 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1321 self.syntax_map.lock().language_registry()
1322 }
1323
1324 /// Assign the line ending type to the buffer.
1325 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1326 self.text.set_line_ending(line_ending);
1327
1328 let lamport_timestamp = self.text.lamport_clock.tick();
1329 self.send_operation(
1330 Operation::UpdateLineEnding {
1331 line_ending,
1332 lamport_timestamp,
1333 },
1334 true,
1335 cx,
1336 );
1337 }
1338
1339 /// Assign the buffer a new [`Capability`].
1340 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1341 if self.capability != capability {
1342 self.capability = capability;
1343 cx.emit(BufferEvent::CapabilityChanged)
1344 }
1345 }
1346
1347 /// This method is called to signal that the buffer has been saved.
1348 pub fn did_save(
1349 &mut self,
1350 version: clock::Global,
1351 mtime: Option<MTime>,
1352 cx: &mut Context<Self>,
1353 ) {
1354 self.saved_version = version.clone();
1355 self.has_unsaved_edits.set((version, false));
1356 self.has_conflict = false;
1357 self.saved_mtime = mtime;
1358 self.was_changed();
1359 cx.emit(BufferEvent::Saved);
1360 cx.notify();
1361 }
1362
1363 /// Reloads the contents of the buffer from disk.
1364 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1365 let (tx, rx) = futures::channel::oneshot::channel();
1366 let encoding = EncodingWrapper::new(*(self.encoding.lock().unwrap()));
1367 let buffer_encoding = self.encoding.clone();
1368
1369 let prev_version = self.text.version();
1370 self.reload_task = Some(cx.spawn(async move |this, cx| {
1371 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1372 let file = this.file.as_ref()?.as_local()?;
1373 Some((file.disk_state().mtime(), {
1374 file.load(cx, encoding, false, true, Some(buffer_encoding))
1375 }))
1376 })?
1377 else {
1378 return Ok(());
1379 };
1380
1381 let new_text = new_text.await?;
1382 let diff = this
1383 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1384 .await;
1385 this.update(cx, |this, cx| {
1386 if this.version() == diff.base_version {
1387 this.finalize_last_transaction();
1388 this.apply_diff(diff, cx);
1389 tx.send(this.finalize_last_transaction().cloned()).ok();
1390 this.has_conflict = false;
1391 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1392 } else {
1393 if !diff.edits.is_empty()
1394 || this
1395 .edits_since::<usize>(&diff.base_version)
1396 .next()
1397 .is_some()
1398 {
1399 this.has_conflict = true;
1400 }
1401
1402 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1403 }
1404
1405 this.reload_task.take();
1406 })
1407 }));
1408 rx
1409 }
1410
1411 /// This method is called to signal that the buffer has been reloaded.
1412 pub fn did_reload(
1413 &mut self,
1414 version: clock::Global,
1415 line_ending: LineEnding,
1416 mtime: Option<MTime>,
1417 cx: &mut Context<Self>,
1418 ) {
1419 self.saved_version = version;
1420 self.has_unsaved_edits
1421 .set((self.saved_version.clone(), false));
1422 self.text.set_line_ending(line_ending);
1423 self.saved_mtime = mtime;
1424 cx.emit(BufferEvent::Reloaded);
1425 cx.notify();
1426 }
1427
    /// Replaces the [`File`] backing this buffer without emitting any events.
    pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
        self.file = Some(new_file);
    }

1431 /// Updates the [`File`] backing this buffer. This should be called when
1432 /// the file has changed or has been deleted.
1433 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1434 let was_dirty = self.is_dirty();
1435 let mut file_changed = false;
1436
1437 if let Some(old_file) = self.file.as_ref() {
1438 if new_file.path() != old_file.path() {
1439 file_changed = true;
1440 }
1441
1442 let old_state = old_file.disk_state();
1443 let new_state = new_file.disk_state();
1444 if old_state != new_state {
1445 file_changed = true;
1446 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1447 cx.emit(BufferEvent::ReloadNeeded)
1448 }
1449 }
1450 } else {
1451 file_changed = true;
1452 };
1453
1454 self.file = Some(new_file);
1455 if file_changed {
1456 self.was_changed();
1457 self.non_text_state_update_count += 1;
1458 if was_dirty != self.is_dirty() {
1459 cx.emit(BufferEvent::DirtyChanged);
1460 }
1461 cx.emit(BufferEvent::FileHandleChanged);
1462 cx.notify();
1463 }
1464 }
1465
1466 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1467 Some(self.branch_state.as_ref()?.base_buffer.clone())
1468 }
1469
1470 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1471 pub fn language(&self) -> Option<&Arc<Language>> {
1472 self.language.as_ref()
1473 }
1474
1475 /// Returns the [`Language`] at the given location.
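    ///
    /// A minimal sketch: in a Markdown buffer containing a fenced Rust code
    /// block, the innermost language at an offset inside that block is Rust
    /// (assumes a `buffer: Entity<Buffer>`, an `offset` inside the block, and
    /// an `&App` in scope as `cx`):
    ///
    /// ```ignore
    /// let language = buffer.read(cx).language_at(offset);
    /// ```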
1476 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1477 let offset = position.to_offset(self);
1478 let mut is_first = true;
1479 let start_anchor = self.anchor_before(offset);
1480 let end_anchor = self.anchor_after(offset);
1481 self.syntax_map
1482 .lock()
1483 .layers_for_range(offset..offset, &self.text, false)
1484 .filter(|layer| {
1485 if is_first {
1486 is_first = false;
1487 return true;
1488 }
1489
1490 layer
1491 .included_sub_ranges
1492 .map(|sub_ranges| {
1493 sub_ranges.iter().any(|sub_range| {
1494 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1495 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1496 !is_before_start && !is_after_end
1497 })
1498 })
1499 .unwrap_or(true)
1500 })
1501 .last()
1502 .map(|info| info.language.clone())
1503 .or_else(|| self.language.clone())
1504 }
1505
1506 /// Returns each [`Language`] for the active syntax layers at the given location.
1507 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1508 let offset = position.to_offset(self);
1509 let mut languages: Vec<Arc<Language>> = self
1510 .syntax_map
1511 .lock()
1512 .layers_for_range(offset..offset, &self.text, false)
1513 .map(|info| info.language.clone())
1514 .collect();
1515
1516 if languages.is_empty()
1517 && let Some(buffer_language) = self.language()
1518 {
1519 languages.push(buffer_language.clone());
1520 }
1521
1522 languages
1523 }
1524
1525 /// An integer version number that accounts for all updates besides
1526 /// the buffer's text itself (which is versioned via a version vector).
1527 pub fn non_text_state_update_count(&self) -> usize {
1528 self.non_text_state_update_count
1529 }
1530
1531 /// Whether the buffer is being parsed in the background.
1532 #[cfg(any(test, feature = "test-support"))]
1533 pub fn is_parsing(&self) -> bool {
1534 self.reparse.is_some()
1535 }
1536
1537 /// Indicates whether the buffer contains any regions that may be
1538 /// written in a language that hasn't been loaded yet.
1539 pub fn contains_unknown_injections(&self) -> bool {
1540 self.syntax_map.lock().contains_unknown_injections()
1541 }
1542
1543 #[cfg(any(test, feature = "test-support"))]
1544 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1545 self.sync_parse_timeout = timeout;
1546 }
1547
1548 /// Called after an edit to synchronize the buffer's main parse tree with
1549 /// the buffer's new underlying state.
1550 ///
1551 /// Locks the syntax map and interpolates the edits since the last reparse
1552 /// into the foreground syntax tree.
1553 ///
1554 /// Then takes a stable snapshot of the syntax map before unlocking it.
1555 /// The snapshot with the interpolated edits is sent to a background thread,
1556 /// where we ask Tree-sitter to perform an incremental parse.
1557 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse finishes, it calls back into
    /// the main thread and replaces the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
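    ///
    /// A minimal sketch: assigning a language (re)parses the buffer, and edits
    /// that affect the syntax tree schedule reparses automatically (assumes a
    /// `buffer: Entity<Buffer>`, an `Arc<Language>` named `language`, and an
    /// `&mut App` in scope as `cx`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language(Some(language.clone()), cx);
    /// });
    /// ```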
1571 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1572 if self.reparse.is_some() {
1573 return;
1574 }
1575 let language = if let Some(language) = self.language.clone() {
1576 language
1577 } else {
1578 return;
1579 };
1580
1581 let text = self.text_snapshot();
1582 let parsed_version = self.version();
1583
1584 let mut syntax_map = self.syntax_map.lock();
1585 syntax_map.interpolate(&text);
1586 let language_registry = syntax_map.language_registry();
1587 let mut syntax_snapshot = syntax_map.snapshot();
1588 drop(syntax_map);
1589
1590 let parse_task = cx.background_spawn({
1591 let language = language.clone();
1592 let language_registry = language_registry.clone();
1593 async move {
1594 syntax_snapshot.reparse(&text, language_registry, language);
1595 syntax_snapshot
1596 }
1597 });
1598
1599 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1600 match cx
1601 .background_executor()
1602 .block_with_timeout(self.sync_parse_timeout, parse_task)
1603 {
1604 Ok(new_syntax_snapshot) => {
1605 self.did_finish_parsing(new_syntax_snapshot, cx);
1606 self.reparse = None;
1607 }
1608 Err(parse_task) => {
1609 // todo(lw): hot foreground spawn
1610 self.reparse = Some(cx.spawn(async move |this, cx| {
1611 let new_syntax_map = cx.background_spawn(parse_task).await;
1612 this.update(cx, move |this, cx| {
1613 let grammar_changed = || {
1614 this.language.as_ref().is_none_or(|current_language| {
1615 !Arc::ptr_eq(&language, current_language)
1616 })
1617 };
1618 let language_registry_changed = || {
1619 new_syntax_map.contains_unknown_injections()
1620 && language_registry.is_some_and(|registry| {
1621 registry.version() != new_syntax_map.language_registry_version()
1622 })
1623 };
1624 let parse_again = this.version.changed_since(&parsed_version)
1625 || language_registry_changed()
1626 || grammar_changed();
1627 this.did_finish_parsing(new_syntax_map, cx);
1628 this.reparse = None;
1629 if parse_again {
1630 this.reparse(cx);
1631 }
1632 })
1633 .ok();
1634 }));
1635 }
1636 }
1637 }
1638
1639 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1640 self.was_changed();
1641 self.non_text_state_update_count += 1;
1642 self.syntax_map.lock().did_parse(syntax_snapshot);
1643 self.request_autoindent(cx);
1644 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1645 cx.emit(BufferEvent::Reparsed);
1646 cx.notify();
1647 }
1648
1649 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1650 self.parse_status.1.clone()
1651 }
1652
1653 /// Assign to the buffer a set of diagnostics created by a given language server.
1654 pub fn update_diagnostics(
1655 &mut self,
1656 server_id: LanguageServerId,
1657 diagnostics: DiagnosticSet,
1658 cx: &mut Context<Self>,
1659 ) {
1660 let lamport_timestamp = self.text.lamport_clock.tick();
1661 let op = Operation::UpdateDiagnostics {
1662 server_id,
1663 diagnostics: diagnostics.iter().cloned().collect(),
1664 lamport_timestamp,
1665 };
1666
1667 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1668 self.send_operation(op, true, cx);
1669 }
1670
1671 pub fn buffer_diagnostics(
1672 &self,
1673 for_server: Option<LanguageServerId>,
1674 ) -> Vec<&DiagnosticEntry<Anchor>> {
1675 match for_server {
1676 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1677 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1678 Err(_) => Vec::new(),
1679 },
1680 None => self
1681 .diagnostics
1682 .iter()
1683 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1684 .collect(),
1685 }
1686 }
1687
1688 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1689 if let Some(indent_sizes) = self.compute_autoindents() {
1690 let indent_sizes = cx.background_spawn(indent_sizes);
1691 match cx
1692 .background_executor()
1693 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1694 {
1695 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1696 Err(indent_sizes) => {
1697 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1698 let indent_sizes = indent_sizes.await;
1699 this.update(cx, |this, cx| {
1700 this.apply_autoindents(indent_sizes, cx);
1701 })
1702 .ok();
1703 }));
1704 }
1705 }
1706 } else {
1707 self.autoindent_requests.clear();
1708 for tx in self.wait_for_autoindent_txs.drain(..) {
1709 tx.send(()).ok();
1710 }
1711 }
1712 }
1713
1714 fn compute_autoindents(
1715 &self,
1716 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1717 let max_rows_between_yields = 100;
1718 let snapshot = self.snapshot();
1719 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1720 return None;
1721 }
1722
1723 let autoindent_requests = self.autoindent_requests.clone();
1724 Some(async move {
1725 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1726 for request in autoindent_requests {
1727 // Resolve each edited range to its row in the current buffer and in the
1728 // buffer before this batch of edits.
1729 let mut row_ranges = Vec::new();
1730 let mut old_to_new_rows = BTreeMap::new();
1731 let mut language_indent_sizes_by_new_row = Vec::new();
1732 for entry in &request.entries {
1733 let position = entry.range.start;
1734 let new_row = position.to_point(&snapshot).row;
1735 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1736 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1737
1738 if !entry.first_line_is_new {
1739 let old_row = position.to_point(&request.before_edit).row;
1740 old_to_new_rows.insert(old_row, new_row);
1741 }
1742 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1743 }
1744
1745 // Build a map containing the suggested indentation for each of the edited lines
1746 // with respect to the state of the buffer before these edits. This map is keyed
1747 // by the rows for these lines in the current state of the buffer.
1748 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1749 let old_edited_ranges =
1750 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1751 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1752 let mut language_indent_size = IndentSize::default();
1753 for old_edited_range in old_edited_ranges {
1754 let suggestions = request
1755 .before_edit
1756 .suggest_autoindents(old_edited_range.clone())
1757 .into_iter()
1758 .flatten();
1759 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1760 if let Some(suggestion) = suggestion {
1761 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1762
1763 // Find the indent size based on the language for this row.
1764 while let Some((row, size)) = language_indent_sizes.peek() {
1765 if *row > new_row {
1766 break;
1767 }
1768 language_indent_size = *size;
1769 language_indent_sizes.next();
1770 }
1771
1772 let suggested_indent = old_to_new_rows
1773 .get(&suggestion.basis_row)
1774 .and_then(|from_row| {
1775 Some(old_suggestions.get(from_row).copied()?.0)
1776 })
1777 .unwrap_or_else(|| {
1778 request
1779 .before_edit
1780 .indent_size_for_line(suggestion.basis_row)
1781 })
1782 .with_delta(suggestion.delta, language_indent_size);
1783 old_suggestions
1784 .insert(new_row, (suggested_indent, suggestion.within_error));
1785 }
1786 }
1787 yield_now().await;
1788 }
1789
1790 // Compute new suggestions for each line, but only include them in the result
1791 // if they differ from the old suggestion for that line.
1792 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1793 let mut language_indent_size = IndentSize::default();
1794 for (row_range, original_indent_column) in row_ranges {
1795 let new_edited_row_range = if request.is_block_mode {
1796 row_range.start..row_range.start + 1
1797 } else {
1798 row_range.clone()
1799 };
1800
1801 let suggestions = snapshot
1802 .suggest_autoindents(new_edited_row_range.clone())
1803 .into_iter()
1804 .flatten();
1805 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1806 if let Some(suggestion) = suggestion {
1807 // Find the indent size based on the language for this row.
1808 while let Some((row, size)) = language_indent_sizes.peek() {
1809 if *row > new_row {
1810 break;
1811 }
1812 language_indent_size = *size;
1813 language_indent_sizes.next();
1814 }
1815
1816 let suggested_indent = indent_sizes
1817 .get(&suggestion.basis_row)
1818 .copied()
1819 .map(|e| e.0)
1820 .unwrap_or_else(|| {
1821 snapshot.indent_size_for_line(suggestion.basis_row)
1822 })
1823 .with_delta(suggestion.delta, language_indent_size);
1824
1825 if old_suggestions.get(&new_row).is_none_or(
1826 |(old_indentation, was_within_error)| {
1827 suggested_indent != *old_indentation
1828 && (!suggestion.within_error || *was_within_error)
1829 },
1830 ) {
1831 indent_sizes.insert(
1832 new_row,
1833 (suggested_indent, request.ignore_empty_lines),
1834 );
1835 }
1836 }
1837 }
1838
1839 if let (true, Some(original_indent_column)) =
1840 (request.is_block_mode, original_indent_column)
1841 {
1842 let new_indent =
1843 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1844 *indent
1845 } else {
1846 snapshot.indent_size_for_line(row_range.start)
1847 };
1848 let delta = new_indent.len as i64 - original_indent_column as i64;
1849 if delta != 0 {
1850 for row in row_range.skip(1) {
1851 indent_sizes.entry(row).or_insert_with(|| {
1852 let mut size = snapshot.indent_size_for_line(row);
1853 if size.kind == new_indent.kind {
1854 match delta.cmp(&0) {
1855 Ordering::Greater => size.len += delta as u32,
1856 Ordering::Less => {
1857 size.len = size.len.saturating_sub(-delta as u32)
1858 }
1859 Ordering::Equal => {}
1860 }
1861 }
1862 (size, request.ignore_empty_lines)
1863 });
1864 }
1865 }
1866 }
1867
1868 yield_now().await;
1869 }
1870 }
1871
1872 indent_sizes
1873 .into_iter()
1874 .filter_map(|(row, (indent, ignore_empty_lines))| {
1875 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1876 None
1877 } else {
1878 Some((row, indent))
1879 }
1880 })
1881 .collect()
1882 })
1883 }
1884
1885 fn apply_autoindents(
1886 &mut self,
1887 indent_sizes: BTreeMap<u32, IndentSize>,
1888 cx: &mut Context<Self>,
1889 ) {
1890 self.autoindent_requests.clear();
1891 for tx in self.wait_for_autoindent_txs.drain(..) {
1892 tx.send(()).ok();
1893 }
1894
1895 let edits: Vec<_> = indent_sizes
1896 .into_iter()
1897 .filter_map(|(row, indent_size)| {
1898 let current_size = indent_size_for_line(self, row);
1899 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1900 })
1901 .collect();
1902
1903 let preserve_preview = self.preserve_preview();
1904 self.edit(edits, None, cx);
1905 if preserve_preview {
1906 self.refresh_preview();
1907 }
1908 }
1909
1910 /// Create a minimal edit that will cause the given row to be indented
1911 /// with the given size. After applying this edit, the length of the line
1912 /// will always be at least `new_size.len`.
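    ///
    /// A small sketch of the expected shape of the result, assuming 2- and
    /// 4-space indents (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let current = IndentSize::spaces(2);
    /// let desired = IndentSize::spaces(4);
    /// // Growing the indent inserts the two missing spaces at column 0 of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(3, current, desired);
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```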
1913 pub fn edit_for_indent_size_adjustment(
1914 row: u32,
1915 current_size: IndentSize,
1916 new_size: IndentSize,
1917 ) -> Option<(Range<Point>, String)> {
1918 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1920 Ordering::Greater => {
1921 let point = Point::new(row, 0);
1922 Some((
1923 point..point,
1924 iter::repeat(new_size.char())
1925 .take((new_size.len - current_size.len) as usize)
1926 .collect::<String>(),
1927 ))
1928 }
1929
1930 Ordering::Less => Some((
1931 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1932 String::new(),
1933 )),
1934
1935 Ordering::Equal => None,
1936 }
1937 } else {
1938 Some((
1939 Point::new(row, 0)..Point::new(row, current_size.len),
1940 iter::repeat(new_size.char())
1941 .take(new_size.len as usize)
1942 .collect::<String>(),
1943 ))
1944 }
1945 }
1946
1947 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1948 /// and the given new text.
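    ///
    /// A sketch of the usual round trip with [`Buffer::apply_diff`], assuming an
    /// async context where the buffer is held in an `Entity<Buffer>`
    /// (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?;
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```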
1949 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1950 let old_text = self.as_rope().clone();
1951 let base_version = self.version();
1952 cx.background_executor()
1953 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1954 let old_text = old_text.to_string();
1955 let line_ending = LineEnding::detect(&new_text);
1956 LineEnding::normalize(&mut new_text);
1957 let edits = text_diff(&old_text, &new_text);
1958 Diff {
1959 base_version,
1960 line_ending,
1961 edits,
1962 }
1963 })
1964 }
1965
1966 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1968 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1969 let old_text = self.as_rope().clone();
1970 let line_ending = self.line_ending();
1971 let base_version = self.version();
1972 cx.background_spawn(async move {
1973 let ranges = trailing_whitespace_ranges(&old_text);
1974 let empty = Arc::<str>::from("");
1975 Diff {
1976 base_version,
1977 line_ending,
1978 edits: ranges
1979 .into_iter()
1980 .map(|range| (range, empty.clone()))
1981 .collect(),
1982 }
1983 })
1984 }
1985
1986 /// Ensures that the buffer ends with a single newline character, and
1987 /// no other whitespace. Skips if the buffer is empty.
1988 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1989 let len = self.len();
1990 if len == 0 {
1991 return;
1992 }
1993 let mut offset = len;
1994 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1995 let non_whitespace_len = chunk
1996 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1997 .len();
1998 offset -= chunk.len();
1999 offset += non_whitespace_len;
2000 if non_whitespace_len != 0 {
2001 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2002 return;
2003 }
2004 break;
2005 }
2006 }
2007 self.edit([(offset..len, "\n")], None, cx);
2008 }
2009
2010 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2011 /// calculated, then adjust the diff to account for those changes, and discard any
2012 /// parts of the diff that conflict with those changes.
2013 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2014 let snapshot = self.snapshot();
2015 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2016 let mut delta = 0;
2017 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2018 while let Some(edit_since) = edits_since.peek() {
2019 // If the edit occurs after a diff hunk, then it does not
2020 // affect that hunk.
2021 if edit_since.old.start > range.end {
2022 break;
2023 }
2024 // If the edit precedes the diff hunk, then adjust the hunk
2025 // to reflect the edit.
2026 else if edit_since.old.end < range.start {
2027 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2028 edits_since.next();
2029 }
2030 // If the edit intersects a diff hunk, then discard that hunk.
2031 else {
2032 return None;
2033 }
2034 }
2035
2036 let start = (range.start as i64 + delta) as usize;
2037 let end = (range.end as i64 + delta) as usize;
2038 Some((start..end, new_text))
2039 });
2040
2041 self.start_transaction();
2042 self.text.set_line_ending(diff.line_ending);
2043 self.edit(adjusted_edits, None, cx);
2044 self.end_transaction(cx)
2045 }
2046
2047 pub fn has_unsaved_edits(&self) -> bool {
2048 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2049
2050 if last_version == self.version {
2051 self.has_unsaved_edits
2052 .set((last_version, has_unsaved_edits));
2053 return has_unsaved_edits;
2054 }
2055
2056 let has_edits = self.has_edits_since(&self.saved_version);
2057 self.has_unsaved_edits
2058 .set((self.version.clone(), has_edits));
2059 has_edits
2060 }
2061
2062 /// Checks if the buffer has unsaved changes.
2063 pub fn is_dirty(&self) -> bool {
2064 if self.capability == Capability::ReadOnly {
2065 return false;
2066 }
2067 if self.has_conflict {
2068 return true;
2069 }
2070 match self.file.as_ref().map(|f| f.disk_state()) {
2071 Some(DiskState::New) | Some(DiskState::Deleted) => {
2072 !self.is_empty() && self.has_unsaved_edits()
2073 }
2074 _ => self.has_unsaved_edits(),
2075 }
2076 }
2077
2078 /// Checks if the buffer and its file have both changed since the buffer
2079 /// was last saved or reloaded.
2080 pub fn has_conflict(&self) -> bool {
2081 if self.has_conflict {
2082 return true;
2083 }
2084 let Some(file) = self.file.as_ref() else {
2085 return false;
2086 };
2087 match file.disk_state() {
2088 DiskState::New => false,
2089 DiskState::Present { mtime } => match self.saved_mtime {
2090 Some(saved_mtime) => {
2091 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2092 }
2093 None => true,
2094 },
2095 DiskState::Deleted => false,
2096 }
2097 }
2098
2099 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2100 pub fn subscribe(&mut self) -> Subscription {
2101 self.text.subscribe()
2102 }
2103
2104 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2105 ///
2106 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
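    ///
    /// A sketch, assuming the caller keeps the strong `Rc` alive for as long as it
    /// wants to observe changes (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ... later, after edits may have been applied ...
    /// if changed.take() {
    ///     // The buffer's text changed since the bit was registered (or last cleared).
    /// }
    /// ```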
2108 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2109 if let Err(ix) = self
2110 .change_bits
2111 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2112 {
2113 self.change_bits.insert(ix, bit);
2114 }
2115 }
2116
2117 /// Set the change bit for all "listeners".
2118 fn was_changed(&mut self) {
2119 self.change_bits.retain(|change_bit| {
2120 change_bit
2121 .upgrade()
2122 .inspect(|bit| {
2123 _ = bit.replace(true);
2124 })
2125 .is_some()
2126 });
2127 }
2128
2129 /// Starts a transaction, if one is not already in-progress. When undoing or
2130 /// redoing edits, all of the edits performed within a transaction are undone
2131 /// or redone together.
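    ///
    /// A sketch of grouping two edits so that they undo together, assuming a
    /// `cx: &mut Context<Buffer>` (not compiled as a doc-test):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single `buffer.undo(cx)` now reverts both edits.
    /// ```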
2132 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2133 self.start_transaction_at(Instant::now())
2134 }
2135
2136 /// Starts a transaction, providing the current time. Subsequent transactions
2137 /// that occur within a short period of time will be grouped together. This
2138 /// is controlled by the buffer's undo grouping duration.
2139 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2140 self.transaction_depth += 1;
2141 if self.was_dirty_before_starting_transaction.is_none() {
2142 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2143 }
2144 self.text.start_transaction_at(now)
2145 }
2146
2147 /// Terminates the current transaction, if this is the outermost transaction.
2148 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2149 self.end_transaction_at(Instant::now(), cx)
2150 }
2151
2152 /// Terminates the current transaction, providing the current time. Subsequent transactions
2153 /// that occur within a short period of time will be grouped together. This
2154 /// is controlled by the buffer's undo grouping duration.
2155 pub fn end_transaction_at(
2156 &mut self,
2157 now: Instant,
2158 cx: &mut Context<Self>,
2159 ) -> Option<TransactionId> {
2160 assert!(self.transaction_depth > 0);
2161 self.transaction_depth -= 1;
2162 let was_dirty = if self.transaction_depth == 0 {
2163 self.was_dirty_before_starting_transaction.take().unwrap()
2164 } else {
2165 false
2166 };
2167 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2168 self.did_edit(&start_version, was_dirty, cx);
2169 Some(transaction_id)
2170 } else {
2171 None
2172 }
2173 }
2174
2175 /// Manually add a transaction to the buffer's undo history.
2176 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2177 self.text.push_transaction(transaction, now);
2178 }
2179
2180 /// Differs from `push_transaction` in that it does not clear the redo
2181 /// stack. Intended to be used to create a parent transaction to merge
2182 /// potential child transactions into.
2183 ///
2184 /// The caller is responsible for removing it from the undo history using
2185 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2186 /// are merged into this transaction, the caller is responsible for ensuring
2187 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2188 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
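    ///
    /// A sketch of that pattern, assuming a `cx: &mut Context<Buffer>`
    /// (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     // An edit was made: fold the child into the parent transaction.
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // Nothing was merged: drop the empty parent from the undo history.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```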
2191 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2192 self.text.push_empty_transaction(now)
2193 }
2194
2195 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2197 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2198 self.text.finalize_last_transaction()
2199 }
2200
2201 /// Manually group all changes since a given transaction.
2202 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2203 self.text.group_until_transaction(transaction_id);
2204 }
2205
2206 /// Manually remove a transaction from the buffer's undo history
2207 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2208 self.text.forget_transaction(transaction_id)
2209 }
2210
2211 /// Retrieve a transaction from the buffer's undo history
2212 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2213 self.text.get_transaction(transaction_id)
2214 }
2215
2216 /// Manually merge two transactions in the buffer's undo history.
2217 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2218 self.text.merge_transactions(transaction, destination);
2219 }
2220
2221 /// Waits for the buffer to receive operations with the given timestamps.
2222 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2223 &mut self,
2224 edit_ids: It,
2225 ) -> impl Future<Output = Result<()>> + use<It> {
2226 self.text.wait_for_edits(edit_ids)
2227 }
2228
2229 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2230 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2231 &mut self,
2232 anchors: It,
2233 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2234 self.text.wait_for_anchors(anchors)
2235 }
2236
2237 /// Waits for the buffer to receive operations up to the given version.
2238 pub fn wait_for_version(
2239 &mut self,
2240 version: clock::Global,
2241 ) -> impl Future<Output = Result<()>> + use<> {
2242 self.text.wait_for_version(version)
2243 }
2244
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2247 pub fn give_up_waiting(&mut self) {
2248 self.text.give_up_waiting();
2249 }
2250
2251 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2252 let mut rx = None;
2253 if !self.autoindent_requests.is_empty() {
2254 let channel = oneshot::channel();
2255 self.wait_for_autoindent_txs.push(channel.0);
2256 rx = Some(channel.1);
2257 }
2258 rx
2259 }
2260
2261 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2262 pub fn set_active_selections(
2263 &mut self,
2264 selections: Arc<[Selection<Anchor>]>,
2265 line_mode: bool,
2266 cursor_shape: CursorShape,
2267 cx: &mut Context<Self>,
2268 ) {
2269 let lamport_timestamp = self.text.lamport_clock.tick();
2270 self.remote_selections.insert(
2271 self.text.replica_id(),
2272 SelectionSet {
2273 selections: selections.clone(),
2274 lamport_timestamp,
2275 line_mode,
2276 cursor_shape,
2277 },
2278 );
2279 self.send_operation(
2280 Operation::UpdateSelections {
2281 selections,
2282 line_mode,
2283 lamport_timestamp,
2284 cursor_shape,
2285 },
2286 true,
2287 cx,
2288 );
2289 self.non_text_state_update_count += 1;
2290 cx.notify();
2291 }
2292
2293 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2294 /// this replica.
2295 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2296 if self
2297 .remote_selections
2298 .get(&self.text.replica_id())
2299 .is_none_or(|set| !set.selections.is_empty())
2300 {
2301 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2302 }
2303 }
2304
2305 pub fn set_agent_selections(
2306 &mut self,
2307 selections: Arc<[Selection<Anchor>]>,
2308 line_mode: bool,
2309 cursor_shape: CursorShape,
2310 cx: &mut Context<Self>,
2311 ) {
2312 let lamport_timestamp = self.text.lamport_clock.tick();
2313 self.remote_selections.insert(
2314 ReplicaId::AGENT,
2315 SelectionSet {
2316 selections,
2317 lamport_timestamp,
2318 line_mode,
2319 cursor_shape,
2320 },
2321 );
2322 self.non_text_state_update_count += 1;
2323 cx.notify();
2324 }
2325
2326 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2327 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2328 }
2329
2330 /// Replaces the buffer's entire text.
2331 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2332 where
2333 T: Into<Arc<str>>,
2334 {
2335 self.autoindent_requests.clear();
2336 self.edit([(0..self.len(), text)], None, cx)
2337 }
2338
2339 /// Appends the given text to the end of the buffer.
2340 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2341 where
2342 T: Into<Arc<str>>,
2343 {
2344 self.edit([(self.len()..self.len(), text)], None, cx)
2345 }
2346
2347 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2348 /// delete, and a string of text to insert at that location.
2349 ///
2350 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2351 /// request for the edited ranges, which will be processed when the buffer finishes
2352 /// parsing.
2353 ///
2354 /// Parsing takes place at the end of a transaction, and may compute synchronously
2355 /// or asynchronously, depending on the changes.
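    ///
    /// A sketch of a single auto-indented edit, assuming a `cx: &mut Context<Buffer>`
    /// (not compiled as a doc-test):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     // Replace the first four columns of row 1 and re-indent the edited line.
    ///     [(Point::new(1, 0)..Point::new(1, 4), "if done {")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```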
2356 pub fn edit<I, S, T>(
2357 &mut self,
2358 edits_iter: I,
2359 autoindent_mode: Option<AutoindentMode>,
2360 cx: &mut Context<Self>,
2361 ) -> Option<clock::Lamport>
2362 where
2363 I: IntoIterator<Item = (Range<S>, T)>,
2364 S: ToOffset,
2365 T: Into<Arc<str>>,
2366 {
2367 // Skip invalid edits and coalesce contiguous ones.
2368 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2369
2370 for (range, new_text) in edits_iter {
2371 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2372
2373 if range.start > range.end {
2374 mem::swap(&mut range.start, &mut range.end);
2375 }
2376 let new_text = new_text.into();
2377 if !new_text.is_empty() || !range.is_empty() {
2378 if let Some((prev_range, prev_text)) = edits.last_mut()
2379 && prev_range.end >= range.start
2380 {
2381 prev_range.end = cmp::max(prev_range.end, range.end);
2382 *prev_text = format!("{prev_text}{new_text}").into();
2383 } else {
2384 edits.push((range, new_text));
2385 }
2386 }
2387 }
2388 if edits.is_empty() {
2389 return None;
2390 }
2391
2392 self.start_transaction();
2393 self.pending_autoindent.take();
2394 let autoindent_request = autoindent_mode
2395 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2396
2397 let edit_operation = self
2398 .text
2399 .edit(edits.iter().cloned(), cx.background_executor());
2400 let edit_id = edit_operation.timestamp();
2401
2402 if let Some((before_edit, mode)) = autoindent_request {
2403 let mut delta = 0isize;
2404 let mut previous_setting = None;
2405 let entries: Vec<_> = edits
2406 .into_iter()
2407 .enumerate()
2408 .zip(&edit_operation.as_edit().unwrap().new_text)
2409 .filter(|((_, (range, _)), _)| {
2410 let language = before_edit.language_at(range.start);
2411 let language_id = language.map(|l| l.id());
2412 if let Some((cached_language_id, auto_indent)) = previous_setting
2413 && cached_language_id == language_id
2414 {
2415 auto_indent
2416 } else {
2417 // The auto-indent setting is not present in editorconfigs, hence
2418 // we can avoid passing the file here.
2419 let auto_indent =
2420 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2421 previous_setting = Some((language_id, auto_indent));
2422 auto_indent
2423 }
2424 })
2425 .map(|((ix, (range, _)), new_text)| {
2426 let new_text_length = new_text.len();
2427 let old_start = range.start.to_point(&before_edit);
2428 let new_start = (delta + range.start as isize) as usize;
2429 let range_len = range.end - range.start;
2430 delta += new_text_length as isize - range_len as isize;
2431
2432 // Decide what range of the insertion to auto-indent, and whether
2433 // the first line of the insertion should be considered a newly-inserted line
2434 // or an edit to an existing line.
2435 let mut range_of_insertion_to_indent = 0..new_text_length;
2436 let mut first_line_is_new = true;
2437
2438 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2439 let old_line_end = before_edit.line_len(old_start.row);
2440
2441 if old_start.column > old_line_start {
2442 first_line_is_new = false;
2443 }
2444
2445 if !new_text.contains('\n')
2446 && (old_start.column + (range_len as u32) < old_line_end
2447 || old_line_end == old_line_start)
2448 {
2449 first_line_is_new = false;
2450 }
2451
2452 // When inserting text starting with a newline, avoid auto-indenting the
2453 // previous line.
2454 if new_text.starts_with('\n') {
2455 range_of_insertion_to_indent.start += 1;
2456 first_line_is_new = true;
2457 }
2458
2459 let mut original_indent_column = None;
2460 if let AutoindentMode::Block {
2461 original_indent_columns,
2462 } = &mode
2463 {
2464 original_indent_column = Some(if new_text.starts_with('\n') {
2465 indent_size_for_text(
2466 new_text[range_of_insertion_to_indent.clone()].chars(),
2467 )
2468 .len
2469 } else {
2470 original_indent_columns
2471 .get(ix)
2472 .copied()
2473 .flatten()
2474 .unwrap_or_else(|| {
2475 indent_size_for_text(
2476 new_text[range_of_insertion_to_indent.clone()].chars(),
2477 )
2478 .len
2479 })
2480 });
2481
2482 // Avoid auto-indenting the line after the edit.
2483 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2484 range_of_insertion_to_indent.end -= 1;
2485 }
2486 }
2487
2488 AutoindentRequestEntry {
2489 first_line_is_new,
2490 original_indent_column,
2491 indent_size: before_edit.language_indent_size_at(range.start, cx),
2492 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2493 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2494 }
2495 })
2496 .collect();
2497
2498 if !entries.is_empty() {
2499 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2500 before_edit,
2501 entries,
2502 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2503 ignore_empty_lines: false,
2504 }));
2505 }
2506 }
2507
2508 self.end_transaction(cx);
2509 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2510 Some(edit_id)
2511 }
2512
2513 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2514 self.was_changed();
2515
2516 if self.edits_since::<usize>(old_version).next().is_none() {
2517 return;
2518 }
2519
2520 self.reparse(cx);
2521 cx.emit(BufferEvent::Edited);
2522 if was_dirty != self.is_dirty() {
2523 cx.emit(BufferEvent::DirtyChanged);
2524 }
2525 cx.notify();
2526 }
2527
2528 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2529 where
2530 I: IntoIterator<Item = Range<T>>,
2531 T: ToOffset + Copy,
2532 {
2533 let before_edit = self.snapshot();
2534 let entries = ranges
2535 .into_iter()
2536 .map(|range| AutoindentRequestEntry {
2537 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2538 first_line_is_new: true,
2539 indent_size: before_edit.language_indent_size_at(range.start, cx),
2540 original_indent_column: None,
2541 })
2542 .collect();
2543 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2544 before_edit,
2545 entries,
2546 is_block_mode: false,
2547 ignore_empty_lines: true,
2548 }));
2549 self.request_autoindent(cx);
2550 }
2551
2552 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2553 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2554 pub fn insert_empty_line(
2555 &mut self,
2556 position: impl ToPoint,
2557 space_above: bool,
2558 space_below: bool,
2559 cx: &mut Context<Self>,
2560 ) -> Point {
2561 let mut position = position.to_point(self);
2562
2563 self.start_transaction();
2564
2565 self.edit(
2566 [(position..position, "\n")],
2567 Some(AutoindentMode::EachLine),
2568 cx,
2569 );
2570
2571 if position.column > 0 {
2572 position += Point::new(1, 0);
2573 }
2574
2575 if !self.is_line_blank(position.row) {
2576 self.edit(
2577 [(position..position, "\n")],
2578 Some(AutoindentMode::EachLine),
2579 cx,
2580 );
2581 }
2582
2583 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2584 self.edit(
2585 [(position..position, "\n")],
2586 Some(AutoindentMode::EachLine),
2587 cx,
2588 );
2589 position.row += 1;
2590 }
2591
2592 if space_below
2593 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2594 {
2595 self.edit(
2596 [(position..position, "\n")],
2597 Some(AutoindentMode::EachLine),
2598 cx,
2599 );
2600 }
2601
2602 self.end_transaction(cx);
2603
2604 position
2605 }
2606
2607 /// Applies the given remote operations to the buffer.
2608 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2609 self.pending_autoindent.take();
2610 let was_dirty = self.is_dirty();
2611 let old_version = self.version.clone();
2612 let mut deferred_ops = Vec::new();
2613 let buffer_ops = ops
2614 .into_iter()
2615 .filter_map(|op| match op {
2616 Operation::Buffer(op) => Some(op),
2617 _ => {
2618 if self.can_apply_op(&op) {
2619 self.apply_op(op, cx);
2620 } else {
2621 deferred_ops.push(op);
2622 }
2623 None
2624 }
2625 })
2626 .collect::<Vec<_>>();
2627 for operation in buffer_ops.iter() {
2628 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2629 }
2630 self.text
2631 .apply_ops(buffer_ops, Some(cx.background_executor()));
2632 self.deferred_ops.insert(deferred_ops);
2633 self.flush_deferred_ops(cx);
2634 self.did_edit(&old_version, was_dirty, cx);
2635 // Notify independently of whether the buffer was edited as the operations could include a
2636 // selection update.
2637 cx.notify();
2638 }
2639
2640 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2641 let mut deferred_ops = Vec::new();
2642 for op in self.deferred_ops.drain().iter().cloned() {
2643 if self.can_apply_op(&op) {
2644 self.apply_op(op, cx);
2645 } else {
2646 deferred_ops.push(op);
2647 }
2648 }
2649 self.deferred_ops.insert(deferred_ops);
2650 }
2651
2652 pub fn has_deferred_ops(&self) -> bool {
2653 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2654 }
2655
2656 fn can_apply_op(&self, operation: &Operation) -> bool {
2657 match operation {
2658 Operation::Buffer(_) => {
2659 unreachable!("buffer operations should never be applied at this layer")
2660 }
2661 Operation::UpdateDiagnostics {
2662 diagnostics: diagnostic_set,
2663 ..
2664 } => diagnostic_set.iter().all(|diagnostic| {
2665 self.text.can_resolve(&diagnostic.range.start)
2666 && self.text.can_resolve(&diagnostic.range.end)
2667 }),
2668 Operation::UpdateSelections { selections, .. } => selections
2669 .iter()
2670 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2671 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2672 }
2673 }
2674
2675 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2676 match operation {
2677 Operation::Buffer(_) => {
2678 unreachable!("buffer operations should never be applied at this layer")
2679 }
2680 Operation::UpdateDiagnostics {
2681 server_id,
2682 diagnostics: diagnostic_set,
2683 lamport_timestamp,
2684 } => {
2685 let snapshot = self.snapshot();
2686 self.apply_diagnostic_update(
2687 server_id,
2688 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2689 lamport_timestamp,
2690 cx,
2691 );
2692 }
2693 Operation::UpdateSelections {
2694 selections,
2695 lamport_timestamp,
2696 line_mode,
2697 cursor_shape,
2698 } => {
2699 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2700 && set.lamport_timestamp > lamport_timestamp
2701 {
2702 return;
2703 }
2704
2705 self.remote_selections.insert(
2706 lamport_timestamp.replica_id,
2707 SelectionSet {
2708 selections,
2709 lamport_timestamp,
2710 line_mode,
2711 cursor_shape,
2712 },
2713 );
2714 self.text.lamport_clock.observe(lamport_timestamp);
2715 self.non_text_state_update_count += 1;
2716 }
2717 Operation::UpdateCompletionTriggers {
2718 triggers,
2719 lamport_timestamp,
2720 server_id,
2721 } => {
2722 if triggers.is_empty() {
2723 self.completion_triggers_per_language_server
2724 .remove(&server_id);
2725 self.completion_triggers = self
2726 .completion_triggers_per_language_server
2727 .values()
2728 .flat_map(|triggers| triggers.iter().cloned())
2729 .collect();
2730 } else {
2731 self.completion_triggers_per_language_server
2732 .insert(server_id, triggers.iter().cloned().collect());
2733 self.completion_triggers.extend(triggers);
2734 }
2735 self.text.lamport_clock.observe(lamport_timestamp);
2736 }
2737 Operation::UpdateLineEnding {
2738 line_ending,
2739 lamport_timestamp,
2740 } => {
2741 self.text.set_line_ending(line_ending);
2742 self.text.lamport_clock.observe(lamport_timestamp);
2743 }
2744 }
2745 }
2746
2747 fn apply_diagnostic_update(
2748 &mut self,
2749 server_id: LanguageServerId,
2750 diagnostics: DiagnosticSet,
2751 lamport_timestamp: clock::Lamport,
2752 cx: &mut Context<Self>,
2753 ) {
2754 if lamport_timestamp > self.diagnostics_timestamp {
2755 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2756 if diagnostics.is_empty() {
2757 if let Ok(ix) = ix {
2758 self.diagnostics.remove(ix);
2759 }
2760 } else {
2761 match ix {
2762 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2763 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2764 };
2765 }
2766 self.diagnostics_timestamp = lamport_timestamp;
2767 self.non_text_state_update_count += 1;
2768 self.text.lamport_clock.observe(lamport_timestamp);
2769 cx.notify();
2770 cx.emit(BufferEvent::DiagnosticsUpdated);
2771 }
2772 }
2773
2774 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2775 self.was_changed();
2776 cx.emit(BufferEvent::Operation {
2777 operation,
2778 is_local,
2779 });
2780 }
2781
2782 /// Removes the selections for a given peer.
2783 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2784 self.remote_selections.remove(&replica_id);
2785 cx.notify();
2786 }
2787
2788 /// Undoes the most recent transaction.
2789 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2790 let was_dirty = self.is_dirty();
2791 let old_version = self.version.clone();
2792
2793 if let Some((transaction_id, operation)) = self.text.undo() {
2794 self.send_operation(Operation::Buffer(operation), true, cx);
2795 self.did_edit(&old_version, was_dirty, cx);
2796 Some(transaction_id)
2797 } else {
2798 None
2799 }
2800 }
2801
2802 /// Manually undoes a specific transaction in the buffer's undo history.
2803 pub fn undo_transaction(
2804 &mut self,
2805 transaction_id: TransactionId,
2806 cx: &mut Context<Self>,
2807 ) -> bool {
2808 let was_dirty = self.is_dirty();
2809 let old_version = self.version.clone();
2810 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2811 self.send_operation(Operation::Buffer(operation), true, cx);
2812 self.did_edit(&old_version, was_dirty, cx);
2813 true
2814 } else {
2815 false
2816 }
2817 }
2818
2819 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2820 pub fn undo_to_transaction(
2821 &mut self,
2822 transaction_id: TransactionId,
2823 cx: &mut Context<Self>,
2824 ) -> bool {
2825 let was_dirty = self.is_dirty();
2826 let old_version = self.version.clone();
2827
2828 let operations = self.text.undo_to_transaction(transaction_id);
2829 let undone = !operations.is_empty();
2830 for operation in operations {
2831 self.send_operation(Operation::Buffer(operation), true, cx);
2832 }
2833 if undone {
2834 self.did_edit(&old_version, was_dirty, cx)
2835 }
2836 undone
2837 }
2838
2839 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2840 let was_dirty = self.is_dirty();
2841 let operation = self.text.undo_operations(counts);
2842 let old_version = self.version.clone();
2843 self.send_operation(Operation::Buffer(operation), true, cx);
2844 self.did_edit(&old_version, was_dirty, cx);
2845 }
2846
    /// Redoes the most recent transaction.
2848 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2849 let was_dirty = self.is_dirty();
2850 let old_version = self.version.clone();
2851
2852 if let Some((transaction_id, operation)) = self.text.redo() {
2853 self.send_operation(Operation::Buffer(operation), true, cx);
2854 self.did_edit(&old_version, was_dirty, cx);
2855 Some(transaction_id)
2856 } else {
2857 None
2858 }
2859 }
2860
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2862 pub fn redo_to_transaction(
2863 &mut self,
2864 transaction_id: TransactionId,
2865 cx: &mut Context<Self>,
2866 ) -> bool {
2867 let was_dirty = self.is_dirty();
2868 let old_version = self.version.clone();
2869
2870 let operations = self.text.redo_to_transaction(transaction_id);
2871 let redone = !operations.is_empty();
2872 for operation in operations {
2873 self.send_operation(Operation::Buffer(operation), true, cx);
2874 }
2875 if redone {
2876 self.did_edit(&old_version, was_dirty, cx)
2877 }
2878 redone
2879 }
2880
2881 /// Override current completion triggers with the user-provided completion triggers.
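    ///
    /// A sketch, assuming a known `server_id` and a `cx: &mut Context<Buffer>`
    /// (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let triggers = BTreeSet::from([".".to_string(), "::".to_string()]);
    /// buffer.set_completion_triggers(server_id, triggers, cx);
    /// assert!(buffer.completion_triggers().contains("."));
    /// ```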
2882 pub fn set_completion_triggers(
2883 &mut self,
2884 server_id: LanguageServerId,
2885 triggers: BTreeSet<String>,
2886 cx: &mut Context<Self>,
2887 ) {
2888 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2889 if triggers.is_empty() {
2890 self.completion_triggers_per_language_server
2891 .remove(&server_id);
2892 self.completion_triggers = self
2893 .completion_triggers_per_language_server
2894 .values()
2895 .flat_map(|triggers| triggers.iter().cloned())
2896 .collect();
2897 } else {
2898 self.completion_triggers_per_language_server
2899 .insert(server_id, triggers.clone());
2900 self.completion_triggers.extend(triggers.iter().cloned());
2901 }
2902 self.send_operation(
2903 Operation::UpdateCompletionTriggers {
2904 triggers: triggers.into_iter().collect(),
2905 lamport_timestamp: self.completion_triggers_timestamp,
2906 server_id,
2907 },
2908 true,
2909 cx,
2910 );
2911 cx.notify();
2912 }
2913
2914 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2916 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2917 &self.completion_triggers
2918 }
2919
2920 /// Call this directly after performing edits to prevent the preview tab
2921 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2922 /// to return false until there are additional edits.
2923 pub fn refresh_preview(&mut self) {
2924 self.preview_version = self.version.clone();
2925 }
2926
2927 /// Whether we should preserve the preview status of a tab containing this buffer.
2928 pub fn preserve_preview(&self) -> bool {
2929 !self.has_edits_since(&self.preview_version)
2930 }
2931}
2932
2933#[doc(hidden)]
2934#[cfg(any(test, feature = "test-support"))]
2935impl Buffer {
2936 pub fn edit_via_marked_text(
2937 &mut self,
2938 marked_string: &str,
2939 autoindent_mode: Option<AutoindentMode>,
2940 cx: &mut Context<Self>,
2941 ) {
2942 let edits = self.edits_for_marked_text(marked_string);
2943 self.edit(edits, autoindent_mode, cx);
2944 }
2945
2946 pub fn set_group_interval(&mut self, group_interval: Duration) {
2947 self.text.set_group_interval(group_interval);
2948 }
2949
2950 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2951 where
2952 T: rand::Rng,
2953 {
2954 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2955 let mut last_end = None;
2956 for _ in 0..old_range_count {
2957 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2958 break;
2959 }
2960
2961 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2962 let mut range = self.random_byte_range(new_start, rng);
2963 if rng.random_bool(0.2) {
2964 mem::swap(&mut range.start, &mut range.end);
2965 }
2966 last_end = Some(range.end);
2967
2968 let new_text_len = rng.random_range(0..10);
2969 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2970 new_text = new_text.to_uppercase();
2971
2972 edits.push((range, new_text));
2973 }
2974 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2975 self.edit(edits, None, cx);
2976 }
2977
2978 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2979 let was_dirty = self.is_dirty();
2980 let old_version = self.version.clone();
2981
2982 let ops = self.text.randomly_undo_redo(rng);
2983 if !ops.is_empty() {
2984 for op in ops {
2985 self.send_operation(Operation::Buffer(op), true, cx);
2986 self.did_edit(&old_version, was_dirty, cx);
2987 }
2988 }
2989 }
2990}
2991
2992impl EventEmitter<BufferEvent> for Buffer {}
2993
2994impl Deref for Buffer {
2995 type Target = TextBuffer;
2996
2997 fn deref(&self) -> &Self::Target {
2998 &self.text
2999 }
3000}
3001
3002impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the whitespace that already begins the
    /// given line, as it currently exists in the buffer.
3005 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3006 indent_size_for_line(self, row)
3007 }
3008
3009 /// Returns [`IndentSize`] for a given position that respects user settings
3010 /// and language preferences.
3011 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3012 let settings = language_settings(
3013 self.language_at(position).map(|l| l.name()),
3014 self.file(),
3015 cx,
3016 );
3017 if settings.hard_tabs {
3018 IndentSize::tab()
3019 } else {
3020 IndentSize::spaces(settings.tab_size.get())
3021 }
3022 }
3023
3024 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3025 /// is passed in as `single_indent_size`.
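    ///
    /// A sketch, assuming a 4-space indent unit (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```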
3026 pub fn suggested_indents(
3027 &self,
3028 rows: impl Iterator<Item = u32>,
3029 single_indent_size: IndentSize,
3030 ) -> BTreeMap<u32, IndentSize> {
3031 let mut result = BTreeMap::new();
3032
3033 for row_range in contiguous_ranges(rows, 10) {
3034 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3035 Some(suggestions) => suggestions,
3036 _ => break,
3037 };
3038
3039 for (row, suggestion) in row_range.zip(suggestions) {
3040 let indent_size = if let Some(suggestion) = suggestion {
3041 result
3042 .get(&suggestion.basis_row)
3043 .copied()
3044 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3045 .with_delta(suggestion.delta, single_indent_size)
3046 } else {
3047 self.indent_size_for_line(row)
3048 };
3049
3050 result.insert(row, indent_size);
3051 }
3052 }
3053
3054 result
3055 }
3056
3057 fn suggest_autoindents(
3058 &self,
3059 row_range: Range<u32>,
3060 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3061 let config = &self.language.as_ref()?.config;
3062 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3063
3064 #[derive(Debug, Clone)]
3065 struct StartPosition {
3066 start: Point,
3067 suffix: SharedString,
3068 }
3069
3070 // Find the suggested indentation ranges based on the syntax tree.
3071 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3072 let end = Point::new(row_range.end, 0);
3073 let range = (start..end).to_offset(&self.text);
3074 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3075 Some(&grammar.indents_config.as_ref()?.query)
3076 });
3077 let indent_configs = matches
3078 .grammars()
3079 .iter()
3080 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3081 .collect::<Vec<_>>();
3082
3083 let mut indent_ranges = Vec::<Range<Point>>::new();
3084 let mut start_positions = Vec::<StartPosition>::new();
3085 let mut outdent_positions = Vec::<Point>::new();
3086 while let Some(mat) = matches.peek() {
3087 let mut start: Option<Point> = None;
3088 let mut end: Option<Point> = None;
3089
3090 let config = indent_configs[mat.grammar_index];
3091 for capture in mat.captures {
3092 if capture.index == config.indent_capture_ix {
3093 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3094 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3095 } else if Some(capture.index) == config.start_capture_ix {
3096 start = Some(Point::from_ts_point(capture.node.end_position()));
3097 } else if Some(capture.index) == config.end_capture_ix {
3098 end = Some(Point::from_ts_point(capture.node.start_position()));
3099 } else if Some(capture.index) == config.outdent_capture_ix {
3100 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3101 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3102 start_positions.push(StartPosition {
3103 start: Point::from_ts_point(capture.node.start_position()),
3104 suffix: suffix.clone(),
3105 });
3106 }
3107 }
3108
3109 matches.advance();
3110 if let Some((start, end)) = start.zip(end) {
3111 if start.row == end.row {
3112 continue;
3113 }
3114 let range = start..end;
3115 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3116 Err(ix) => indent_ranges.insert(ix, range),
3117 Ok(ix) => {
3118 let prev_range = &mut indent_ranges[ix];
3119 prev_range.end = prev_range.end.max(range.end);
3120 }
3121 }
3122 }
3123 }
3124
3125 let mut error_ranges = Vec::<Range<Point>>::new();
3126 let mut matches = self
3127 .syntax
3128 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3129 while let Some(mat) = matches.peek() {
3130 let node = mat.captures[0].node;
3131 let start = Point::from_ts_point(node.start_position());
3132 let end = Point::from_ts_point(node.end_position());
3133 let range = start..end;
3134 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3135 Ok(ix) | Err(ix) => ix,
3136 };
3137 let mut end_ix = ix;
3138 while let Some(existing_range) = error_ranges.get(end_ix) {
3139 if existing_range.end < end {
3140 end_ix += 1;
3141 } else {
3142 break;
3143 }
3144 }
3145 error_ranges.splice(ix..end_ix, [range]);
3146 matches.advance();
3147 }
3148
3149 outdent_positions.sort();
3150 for outdent_position in outdent_positions {
3151 // find the innermost indent range containing this outdent_position
3152 // set its end to the outdent position
3153 if let Some(range_to_truncate) = indent_ranges
3154 .iter_mut()
3155 .filter(|indent_range| indent_range.contains(&outdent_position))
3156 .next_back()
3157 {
3158 range_to_truncate.end = outdent_position;
3159 }
3160 }
3161
3162 start_positions.sort_by_key(|b| b.start);
3163
        // Find the suggested indentation increases and decreases based on regexes.
3165 let mut regex_outdent_map = HashMap::default();
3166 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3167 let mut start_positions_iter = start_positions.iter().peekable();
3168
3169 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3170 self.for_each_line(
3171 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3172 ..Point::new(row_range.end, 0),
3173 |row, line| {
3174 if config
3175 .decrease_indent_pattern
3176 .as_ref()
3177 .is_some_and(|regex| regex.is_match(line))
3178 {
3179 indent_change_rows.push((row, Ordering::Less));
3180 }
3181 if config
3182 .increase_indent_pattern
3183 .as_ref()
3184 .is_some_and(|regex| regex.is_match(line))
3185 {
3186 indent_change_rows.push((row + 1, Ordering::Greater));
3187 }
3188 while let Some(pos) = start_positions_iter.peek() {
3189 if pos.start.row < row {
3190 let pos = start_positions_iter.next().unwrap();
3191 last_seen_suffix
3192 .entry(pos.suffix.to_string())
3193 .or_default()
3194 .push(pos.start);
3195 } else {
3196 break;
3197 }
3198 }
3199 for rule in &config.decrease_indent_patterns {
3200 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3201 let row_start_column = self.indent_size_for_line(row).len;
3202 let basis_row = rule
3203 .valid_after
3204 .iter()
3205 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3206 .flatten()
3207 .filter(|start_point| start_point.column <= row_start_column)
3208 .max_by_key(|start_point| start_point.row);
3209 if let Some(outdent_to_row) = basis_row {
3210 regex_outdent_map.insert(row, outdent_to_row.row);
3211 }
3212 break;
3213 }
3214 }
3215 },
3216 );
3217
3218 let mut indent_changes = indent_change_rows.into_iter().peekable();
3219 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3220 prev_non_blank_row.unwrap_or(0)
3221 } else {
3222 row_range.start.saturating_sub(1)
3223 };
3224
3225 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3226 Some(row_range.map(move |row| {
3227 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3228
3229 let mut indent_from_prev_row = false;
3230 let mut outdent_from_prev_row = false;
3231 let mut outdent_to_row = u32::MAX;
3232 let mut from_regex = false;
3233
3234 while let Some((indent_row, delta)) = indent_changes.peek() {
3235 match indent_row.cmp(&row) {
3236 Ordering::Equal => match delta {
3237 Ordering::Less => {
3238 from_regex = true;
3239 outdent_from_prev_row = true
3240 }
3241 Ordering::Greater => {
3242 indent_from_prev_row = true;
3243 from_regex = true
3244 }
3245 _ => {}
3246 },
3247
3248 Ordering::Greater => break,
3249 Ordering::Less => {}
3250 }
3251
3252 indent_changes.next();
3253 }
3254
3255 for range in &indent_ranges {
3256 if range.start.row >= row {
3257 break;
3258 }
3259 if range.start.row == prev_row && range.end > row_start {
3260 indent_from_prev_row = true;
3261 }
3262 if range.end > prev_row_start && range.end <= row_start {
3263 outdent_to_row = outdent_to_row.min(range.start.row);
3264 }
3265 }
3266
3267 if let Some(basis_row) = regex_outdent_map.get(&row) {
3268 indent_from_prev_row = false;
3269 outdent_to_row = *basis_row;
3270 from_regex = true;
3271 }
3272
3273 let within_error = error_ranges
3274 .iter()
3275 .any(|e| e.start.row < row && e.end > row_start);
3276
3277 let suggestion = if outdent_to_row == prev_row
3278 || (outdent_from_prev_row && indent_from_prev_row)
3279 {
3280 Some(IndentSuggestion {
3281 basis_row: prev_row,
3282 delta: Ordering::Equal,
3283 within_error: within_error && !from_regex,
3284 })
3285 } else if indent_from_prev_row {
3286 Some(IndentSuggestion {
3287 basis_row: prev_row,
3288 delta: Ordering::Greater,
3289 within_error: within_error && !from_regex,
3290 })
3291 } else if outdent_to_row < prev_row {
3292 Some(IndentSuggestion {
3293 basis_row: outdent_to_row,
3294 delta: Ordering::Equal,
3295 within_error: within_error && !from_regex,
3296 })
3297 } else if outdent_from_prev_row {
3298 Some(IndentSuggestion {
3299 basis_row: prev_row,
3300 delta: Ordering::Less,
3301 within_error: within_error && !from_regex,
3302 })
3303 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3304 {
3305 Some(IndentSuggestion {
3306 basis_row: prev_row,
3307 delta: Ordering::Equal,
3308 within_error: within_error && !from_regex,
3309 })
3310 } else {
3311 None
3312 };
3313
3314 prev_row = row;
3315 prev_row_start = row_start;
3316 suggestion
3317 }))
3318 }
3319
3320 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3321 while row > 0 {
3322 row -= 1;
3323 if !self.is_line_blank(row) {
3324 return Some(row);
3325 }
3326 }
3327 None
3328 }
3329
3330 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3331 let captures = self.syntax.captures(range, &self.text, |grammar| {
3332 grammar
3333 .highlights_config
3334 .as_ref()
3335 .map(|config| &config.query)
3336 });
3337 let highlight_maps = captures
3338 .grammars()
3339 .iter()
3340 .map(|grammar| grammar.highlight_map())
3341 .collect();
3342 (captures, highlight_maps)
3343 }
3344
3345 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3346 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3347 /// returned in chunks where each chunk has a single syntax highlighting style and
3348 /// diagnostic status.
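    ///
    /// A sketch that reassembles the text of a range while ignoring the highlight
    /// metadata; the `text` field on each chunk is assumed from the chunk type used
    /// by this iterator (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.text` carries the raw text; syntax and diagnostic data ride alongside it.
    ///     text.push_str(chunk.text);
    /// }
    /// ```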
3349 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3350 let range = range.start.to_offset(self)..range.end.to_offset(self);
3351
3352 let mut syntax = None;
3353 if language_aware {
3354 syntax = Some(self.get_highlights(range.clone()));
3355 }
3356 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3357 let diagnostics = language_aware;
3358 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3359 }
3360
3361 pub fn highlighted_text_for_range<T: ToOffset>(
3362 &self,
3363 range: Range<T>,
3364 override_style: Option<HighlightStyle>,
3365 syntax_theme: &SyntaxTheme,
3366 ) -> HighlightedText {
3367 HighlightedText::from_buffer_range(
3368 range,
3369 &self.text,
3370 &self.syntax,
3371 override_style,
3372 syntax_theme,
3373 )
3374 }
3375
3376 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3378 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3379 let mut line = String::new();
3380 let mut row = range.start.row;
3381 for chunk in self
3382 .as_rope()
3383 .chunks_in_range(range.to_offset(self))
3384 .chain(["\n"])
3385 {
3386 for (newline_ix, text) in chunk.split('\n').enumerate() {
3387 if newline_ix > 0 {
3388 callback(row, &line);
3389 row += 1;
3390 line.clear();
3391 }
3392 line.push_str(text);
3393 }
3394 }
3395 }
3396
3397 /// Iterates over every [`SyntaxLayer`] in the buffer.
3398 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3399 self.syntax_layers_for_range(0..self.len(), true)
3400 }
3401
3402 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3403 let offset = position.to_offset(self);
3404 self.syntax_layers_for_range(offset..offset, false)
3405 .filter(|l| l.node().end_byte() > offset)
3406 .last()
3407 }
3408
3409 pub fn syntax_layers_for_range<D: ToOffset>(
3410 &self,
3411 range: Range<D>,
3412 include_hidden: bool,
3413 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3414 self.syntax
3415 .layers_for_range(range, &self.text, include_hidden)
3416 }
3417
3418 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3419 &self,
3420 range: Range<D>,
3421 ) -> Option<SyntaxLayer<'_>> {
3422 let range = range.to_offset(self);
3423 self.syntax
3424 .layers_for_range(range, &self.text, false)
3425 .max_by(|a, b| {
3426 if a.depth != b.depth {
3427 a.depth.cmp(&b.depth)
3428 } else if a.offset.0 != b.offset.0 {
3429 a.offset.0.cmp(&b.offset.0)
3430 } else {
3431 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3432 }
3433 })
3434 }
3435
3436 /// Returns the main [`Language`].
3437 pub fn language(&self) -> Option<&Arc<Language>> {
3438 self.language.as_ref()
3439 }
3440
3441 /// Returns the [`Language`] at the given location.
3442 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3443 self.syntax_layer_at(position)
3444 .map(|info| info.language)
3445 .or(self.language.as_ref())
3446 }
3447
3448 /// Returns the settings for the language at the given location.
3449 pub fn settings_at<'a, D: ToOffset>(
3450 &'a self,
3451 position: D,
3452 cx: &'a App,
3453 ) -> Cow<'a, LanguageSettings> {
3454 language_settings(
3455 self.language_at(position).map(|l| l.name()),
3456 self.file.as_ref(),
3457 cx,
3458 )
3459 }
3460
3461 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3462 CharClassifier::new(self.language_scope_at(point))
3463 }
3464
3465 /// Returns the [`LanguageScope`] at the given location.
3466 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3467 let offset = position.to_offset(self);
3468 let mut scope = None;
3469 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3470
3471 // Use the layer that has the smallest node intersecting the given point.
3472 for layer in self
3473 .syntax
3474 .layers_for_range(offset..offset, &self.text, false)
3475 {
3476 let mut cursor = layer.node().walk();
3477
3478 let mut range = None;
3479 loop {
3480 let child_range = cursor.node().byte_range();
3481 if !child_range.contains(&offset) {
3482 break;
3483 }
3484
3485 range = Some(child_range);
3486 if cursor.goto_first_child_for_byte(offset).is_none() {
3487 break;
3488 }
3489 }
3490
3491 if let Some(range) = range
3492 && smallest_range_and_depth.as_ref().is_none_or(
3493 |(smallest_range, smallest_range_depth)| {
3494 if layer.depth > *smallest_range_depth {
3495 true
3496 } else if layer.depth == *smallest_range_depth {
3497 range.len() < smallest_range.len()
3498 } else {
3499 false
3500 }
3501 },
3502 )
3503 {
3504 smallest_range_and_depth = Some((range, layer.depth));
3505 scope = Some(LanguageScope {
3506 language: layer.language.clone(),
3507 override_id: layer.override_id(offset, &self.text),
3508 });
3509 }
3510 }
3511
3512 scope.or_else(|| {
3513 self.language.clone().map(|language| LanguageScope {
3514 language,
3515 override_id: None,
3516 })
3517 })
3518 }
3519
3520 /// Returns a tuple of the range and character kind of the word
3521 /// surrounding the given position.
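    ///
    /// A minimal sketch (assuming `snapshot` and a cursor `offset` are in scope; not a doctest):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    /// }
    /// ```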
3522 pub fn surrounding_word<T: ToOffset>(
3523 &self,
3524 start: T,
3525 scope_context: Option<CharScopeContext>,
3526 ) -> (Range<usize>, Option<CharKind>) {
3527 let mut start = start.to_offset(self);
3528 let mut end = start;
3529 let mut next_chars = self.chars_at(start).take(128).peekable();
3530 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3531
3532 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3533 let word_kind = cmp::max(
3534 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3535 next_chars.peek().copied().map(|c| classifier.kind(c)),
3536 );
3537
3538 for ch in prev_chars {
3539 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3540 start -= ch.len_utf8();
3541 } else {
3542 break;
3543 }
3544 }
3545
3546 for ch in next_chars {
3547 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3548 end += ch.len_utf8();
3549 } else {
3550 break;
3551 }
3552 }
3553
3554 (start..end, word_kind)
3555 }
3556
3557 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3558 /// range. When `require_larger` is true, the node found must be larger than the query range.
3559 ///
3560 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3561 /// be moved to the root of the tree.
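    ///
    /// A minimal sketch of how this is used internally (not a doctest):
    ///
    /// ```ignore
    /// let mut cursor = layer.node().walk();
    /// if Self::goto_node_enclosing_range(&mut cursor, &(start..end), false) {
    ///     let enclosing_node = cursor.node();
    /// }
    /// ```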
3562 fn goto_node_enclosing_range(
3563 cursor: &mut tree_sitter::TreeCursor,
3564 query_range: &Range<usize>,
3565 require_larger: bool,
3566 ) -> bool {
3567 let mut ascending = false;
3568 loop {
3569 let mut range = cursor.node().byte_range();
3570 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3573 if range.start > query_range.start {
3574 cursor.goto_previous_sibling();
3575 range = cursor.node().byte_range();
3576 }
3577 } else {
3578 // When the query range is non-empty and the current node ends exactly at the start,
3579 // move to the next sibling to find a node that extends beyond the start.
3580 if range.end == query_range.start {
3581 cursor.goto_next_sibling();
3582 range = cursor.node().byte_range();
3583 }
3584 }
3585
3586 let encloses = range.contains_inclusive(query_range)
3587 && (!require_larger || range.len() > query_range.len());
3588 if !encloses {
3589 ascending = true;
3590 if !cursor.goto_parent() {
3591 return false;
3592 }
3593 continue;
3594 } else if ascending {
3595 return true;
3596 }
3597
3598 // Descend into the current node.
3599 if cursor
3600 .goto_first_child_for_byte(query_range.start)
3601 .is_none()
3602 {
3603 return true;
3604 }
3605 }
3606 }
3607
3608 pub fn syntax_ancestor<'a, T: ToOffset>(
3609 &'a self,
3610 range: Range<T>,
3611 ) -> Option<tree_sitter::Node<'a>> {
3612 let range = range.start.to_offset(self)..range.end.to_offset(self);
3613 let mut result: Option<tree_sitter::Node<'a>> = None;
3614 for layer in self
3615 .syntax
3616 .layers_for_range(range.clone(), &self.text, true)
3617 {
3618 let mut cursor = layer.node().walk();
3619
3620 // Find the node that both contains the range and is larger than it.
3621 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3622 continue;
3623 }
3624
3625 let left_node = cursor.node();
3626 let mut layer_result = left_node;
3627
3628 // For an empty range, try to find another node immediately to the right of the range.
3629 if left_node.end_byte() == range.start {
3630 let mut right_node = None;
3631 while !cursor.goto_next_sibling() {
3632 if !cursor.goto_parent() {
3633 break;
3634 }
3635 }
3636
3637 while cursor.node().start_byte() == range.start {
3638 right_node = Some(cursor.node());
3639 if !cursor.goto_first_child() {
3640 break;
3641 }
3642 }
3643
3644 // If there is a candidate node on both sides of the (empty) range, then
3645 // decide between the two by favoring a named node over an anonymous token.
3646 // If both nodes are the same in that regard, favor the right one.
3647 if let Some(right_node) = right_node
3648 && (right_node.is_named() || !left_node.is_named())
3649 {
3650 layer_result = right_node;
3651 }
3652 }
3653
3654 if let Some(previous_result) = &result
3655 && previous_result.byte_range().len() < layer_result.byte_range().len()
3656 {
3657 continue;
3658 }
3659 result = Some(layer_result);
3660 }
3661
3662 result
3663 }
3664
3665 /// Find the previous sibling syntax node at the given range.
3666 ///
3667 /// This function locates the syntax node that precedes the node containing
3668 /// the given range. It searches hierarchically by:
3669 /// 1. Finding the node that contains the given range
3670 /// 2. Looking for the previous sibling at the same tree level
3671 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3672 ///
3673 /// Returns `None` if there is no previous sibling at any ancestor level.
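    ///
    /// A minimal sketch (assuming `snapshot` and a byte `range` are in scope; not a doctest):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(range.clone()) {
    ///     println!("previous sibling kind: {}", prev.kind());
    /// }
    /// ```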
3674 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3675 &'a self,
3676 range: Range<T>,
3677 ) -> Option<tree_sitter::Node<'a>> {
3678 let range = range.start.to_offset(self)..range.end.to_offset(self);
3679 let mut result: Option<tree_sitter::Node<'a>> = None;
3680
3681 for layer in self
3682 .syntax
3683 .layers_for_range(range.clone(), &self.text, true)
3684 {
3685 let mut cursor = layer.node().walk();
3686
3687 // Find the node that contains the range
3688 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3689 continue;
3690 }
3691
3692 // Look for the previous sibling, moving up ancestor levels if needed
3693 loop {
3694 if cursor.goto_previous_sibling() {
3695 let layer_result = cursor.node();
3696
3697 if let Some(previous_result) = &result {
3698 if previous_result.byte_range().end < layer_result.byte_range().end {
3699 continue;
3700 }
3701 }
3702 result = Some(layer_result);
3703 break;
3704 }
3705
3706 // No sibling found at this level, try moving up to parent
3707 if !cursor.goto_parent() {
3708 break;
3709 }
3710 }
3711 }
3712
3713 result
3714 }
3715
3716 /// Find the next sibling syntax node at the given range.
3717 ///
3718 /// This function locates the syntax node that follows the node containing
3719 /// the given range. It searches hierarchically by:
3720 /// 1. Finding the node that contains the given range
3721 /// 2. Looking for the next sibling at the same tree level
3722 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3723 ///
3724 /// Returns `None` if there is no next sibling at any ancestor level.
3725 pub fn syntax_next_sibling<'a, T: ToOffset>(
3726 &'a self,
3727 range: Range<T>,
3728 ) -> Option<tree_sitter::Node<'a>> {
3729 let range = range.start.to_offset(self)..range.end.to_offset(self);
3730 let mut result: Option<tree_sitter::Node<'a>> = None;
3731
3732 for layer in self
3733 .syntax
3734 .layers_for_range(range.clone(), &self.text, true)
3735 {
3736 let mut cursor = layer.node().walk();
3737
3738 // Find the node that contains the range
3739 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3740 continue;
3741 }
3742
3743 // Look for the next sibling, moving up ancestor levels if needed
3744 loop {
3745 if cursor.goto_next_sibling() {
3746 let layer_result = cursor.node();
3747
3748 if let Some(previous_result) = &result {
3749 if previous_result.byte_range().start > layer_result.byte_range().start {
3750 continue;
3751 }
3752 }
3753 result = Some(layer_result);
3754 break;
3755 }
3756
3757 // No sibling found at this level, try moving up to parent
3758 if !cursor.goto_parent() {
3759 break;
3760 }
3761 }
3762 }
3763
3764 result
3765 }
3766
    /// Returns the root syntax node within the given row.
3768 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3769 let start_offset = position.to_offset(self);
3770
3771 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3772
3773 let layer = self
3774 .syntax
3775 .layers_for_range(start_offset..start_offset, &self.text, true)
3776 .next()?;
3777
3778 let mut cursor = layer.node().walk();
3779
        // Descend to the first leaf that touches the given position.
3781 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3782 if cursor.node().end_byte() == start_offset {
3783 cursor.goto_next_sibling();
3784 }
3785 }
3786
3787 // Ascend to the root node within the same row.
3788 while cursor.goto_parent() {
3789 if cursor.node().start_position().row != row {
3790 break;
3791 }
3792 }
3793
3794 Some(cursor.node())
3795 }
3796
3797 /// Returns the outline for the buffer.
3798 ///
3799 /// This method allows passing an optional [`SyntaxTheme`] to
3800 /// syntax-highlight the returned symbols.
3801 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3802 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3803 }
3804
3805 /// Returns all the symbols that contain the given position.
3806 ///
3807 /// This method allows passing an optional [`SyntaxTheme`] to
3808 /// syntax-highlight the returned symbols.
3809 pub fn symbols_containing<T: ToOffset>(
3810 &self,
3811 position: T,
3812 theme: Option<&SyntaxTheme>,
3813 ) -> Vec<OutlineItem<Anchor>> {
3814 let position = position.to_offset(self);
3815 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3816 let end = self.clip_offset(position + 1, Bias::Right);
3817 let mut items = self.outline_items_containing(start..end, false, theme);
3818 let mut prev_depth = None;
3819 items.retain(|item| {
3820 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3821 prev_depth = Some(item.depth);
3822 result
3823 });
3824 items
3825 }
3826
3827 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3828 let range = range.to_offset(self);
3829 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3830 grammar.outline_config.as_ref().map(|c| &c.query)
3831 });
3832 let configs = matches
3833 .grammars()
3834 .iter()
3835 .map(|g| g.outline_config.as_ref().unwrap())
3836 .collect::<Vec<_>>();
3837
3838 while let Some(mat) = matches.peek() {
3839 let config = &configs[mat.grammar_index];
3840 let containing_item_node = maybe!({
3841 let item_node = mat.captures.iter().find_map(|cap| {
3842 if cap.index == config.item_capture_ix {
3843 Some(cap.node)
3844 } else {
3845 None
3846 }
3847 })?;
3848
3849 let item_byte_range = item_node.byte_range();
3850 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3851 None
3852 } else {
3853 Some(item_node)
3854 }
3855 });
3856
3857 if let Some(item_node) = containing_item_node {
3858 return Some(
3859 Point::from_ts_point(item_node.start_position())
3860 ..Point::from_ts_point(item_node.end_position()),
3861 );
3862 }
3863
3864 matches.advance();
3865 }
3866 None
3867 }
3868
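    /// Returns the outline items (e.g. functions and types, as defined by each language's
    /// outline query) whose ranges intersect the given range, with depths assigned based
    /// on containment.
    ///
    /// A minimal sketch (assuming `snapshot` is in scope; not a doctest):
    ///
    /// ```ignore
    /// let names: Vec<String> = snapshot
    ///     .outline_items_containing(0..snapshot.len(), true, None)
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect();
    /// ```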
3869 pub fn outline_items_containing<T: ToOffset>(
3870 &self,
3871 range: Range<T>,
3872 include_extra_context: bool,
3873 theme: Option<&SyntaxTheme>,
3874 ) -> Vec<OutlineItem<Anchor>> {
3875 self.outline_items_containing_internal(
3876 range,
3877 include_extra_context,
3878 theme,
3879 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3880 )
3881 }
3882
3883 pub fn outline_items_as_points_containing<T: ToOffset>(
3884 &self,
3885 range: Range<T>,
3886 include_extra_context: bool,
3887 theme: Option<&SyntaxTheme>,
3888 ) -> Vec<OutlineItem<Point>> {
3889 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3890 range
3891 })
3892 }
3893
3894 fn outline_items_containing_internal<T: ToOffset, U>(
3895 &self,
3896 range: Range<T>,
3897 include_extra_context: bool,
3898 theme: Option<&SyntaxTheme>,
3899 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3900 ) -> Vec<OutlineItem<U>> {
3901 let range = range.to_offset(self);
3902 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3903 grammar.outline_config.as_ref().map(|c| &c.query)
3904 });
3905
3906 let mut items = Vec::new();
3907 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3908 while let Some(mat) = matches.peek() {
3909 let config = matches.grammars()[mat.grammar_index]
3910 .outline_config
3911 .as_ref()
3912 .unwrap();
3913 if let Some(item) =
3914 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3915 {
3916 items.push(item);
3917 } else if let Some(capture) = mat
3918 .captures
3919 .iter()
3920 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3921 {
3922 let capture_range = capture.node.start_position()..capture.node.end_position();
3923 let mut capture_row_range =
3924 capture_range.start.row as u32..capture_range.end.row as u32;
3925 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3926 {
3927 capture_row_range.end -= 1;
3928 }
3929 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3930 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3931 last_row_range.end = capture_row_range.end;
3932 } else {
3933 annotation_row_ranges.push(capture_row_range);
3934 }
3935 } else {
3936 annotation_row_ranges.push(capture_row_range);
3937 }
3938 }
3939 matches.advance();
3940 }
3941
3942 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3943
        // Assign depths based on containment relationships and convert the ranges
        // using the provided range callback.
3945 let mut item_ends_stack = Vec::<Point>::new();
3946 let mut anchor_items = Vec::new();
3947 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3948 for item in items {
3949 while let Some(last_end) = item_ends_stack.last().copied() {
3950 if last_end < item.range.end {
3951 item_ends_stack.pop();
3952 } else {
3953 break;
3954 }
3955 }
3956
3957 let mut annotation_row_range = None;
3958 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3959 let row_preceding_item = item.range.start.row.saturating_sub(1);
3960 if next_annotation_row_range.end < row_preceding_item {
3961 annotation_row_ranges.next();
3962 } else {
3963 if next_annotation_row_range.end == row_preceding_item {
3964 annotation_row_range = Some(next_annotation_row_range.clone());
3965 annotation_row_ranges.next();
3966 }
3967 break;
3968 }
3969 }
3970
3971 anchor_items.push(OutlineItem {
3972 depth: item_ends_stack.len(),
3973 range: range_callback(self, item.range.clone()),
3974 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3975 text: item.text,
3976 highlight_ranges: item.highlight_ranges,
3977 name_ranges: item.name_ranges,
3978 body_range: item.body_range.map(|r| range_callback(self, r)),
3979 annotation_range: annotation_row_range.map(|annotation_range| {
3980 let point_range = Point::new(annotation_range.start, 0)
3981 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3982 range_callback(self, point_range)
3983 }),
3984 });
3985 item_ends_stack.push(item.range.end);
3986 }
3987
3988 anchor_items
3989 }
3990
3991 fn next_outline_item(
3992 &self,
3993 config: &OutlineConfig,
3994 mat: &SyntaxMapMatch,
3995 range: &Range<usize>,
3996 include_extra_context: bool,
3997 theme: Option<&SyntaxTheme>,
3998 ) -> Option<OutlineItem<Point>> {
3999 let item_node = mat.captures.iter().find_map(|cap| {
4000 if cap.index == config.item_capture_ix {
4001 Some(cap.node)
4002 } else {
4003 None
4004 }
4005 })?;
4006
4007 let item_byte_range = item_node.byte_range();
4008 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4009 return None;
4010 }
4011 let item_point_range = Point::from_ts_point(item_node.start_position())
4012 ..Point::from_ts_point(item_node.end_position());
4013
4014 let mut open_point = None;
4015 let mut close_point = None;
4016
4017 let mut buffer_ranges = Vec::new();
4018 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4019 let mut range = node.start_byte()..node.end_byte();
4020 let start = node.start_position();
4021 if node.end_position().row > start.row {
4022 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4023 }
4024
4025 if !range.is_empty() {
4026 buffer_ranges.push((range, node_is_name));
4027 }
4028 };
4029
4030 for capture in mat.captures {
4031 if capture.index == config.name_capture_ix {
4032 add_to_buffer_ranges(capture.node, true);
4033 } else if Some(capture.index) == config.context_capture_ix
4034 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4035 {
4036 add_to_buffer_ranges(capture.node, false);
4037 } else {
4038 if Some(capture.index) == config.open_capture_ix {
4039 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4040 } else if Some(capture.index) == config.close_capture_ix {
4041 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4042 }
4043 }
4044 }
4045
4046 if buffer_ranges.is_empty() {
4047 return None;
4048 }
4049 let source_range_for_text =
4050 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4051
4052 let mut text = String::new();
4053 let mut highlight_ranges = Vec::new();
4054 let mut name_ranges = Vec::new();
4055 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4056 let mut last_buffer_range_end = 0;
4057 for (buffer_range, is_name) in buffer_ranges {
4058 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4059 if space_added {
4060 text.push(' ');
4061 }
4062 let before_append_len = text.len();
4063 let mut offset = buffer_range.start;
4064 chunks.seek(buffer_range.clone());
4065 for mut chunk in chunks.by_ref() {
4066 if chunk.text.len() > buffer_range.end - offset {
4067 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4068 offset = buffer_range.end;
4069 } else {
4070 offset += chunk.text.len();
4071 }
4072 let style = chunk
4073 .syntax_highlight_id
4074 .zip(theme)
4075 .and_then(|(highlight, theme)| highlight.style(theme));
4076 if let Some(style) = style {
4077 let start = text.len();
4078 let end = start + chunk.text.len();
4079 highlight_ranges.push((start..end, style));
4080 }
4081 text.push_str(chunk.text);
4082 if offset >= buffer_range.end {
4083 break;
4084 }
4085 }
4086 if is_name {
4087 let after_append_len = text.len();
4088 let start = if space_added && !name_ranges.is_empty() {
4089 before_append_len - 1
4090 } else {
4091 before_append_len
4092 };
4093 name_ranges.push(start..after_append_len);
4094 }
4095 last_buffer_range_end = buffer_range.end;
4096 }
4097
4098 Some(OutlineItem {
4099 depth: 0, // We'll calculate the depth later
4100 range: item_point_range,
4101 source_range_for_text: source_range_for_text.to_point(self),
4102 text,
4103 highlight_ranges,
4104 name_ranges,
4105 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4106 annotation_range: None,
4107 })
4108 }
4109
4110 pub fn function_body_fold_ranges<T: ToOffset>(
4111 &self,
4112 within: Range<T>,
4113 ) -> impl Iterator<Item = Range<usize>> + '_ {
4114 self.text_object_ranges(within, TreeSitterOptions::default())
4115 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4116 }
4117
4118 /// For each grammar in the language, runs the provided
4119 /// [`tree_sitter::Query`] against the given range.
4120 pub fn matches(
4121 &self,
4122 range: Range<usize>,
4123 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4124 ) -> SyntaxMapMatches<'_> {
4125 self.syntax.matches(range, self, query)
4126 }
4127
4128 pub fn all_bracket_ranges(
4129 &self,
4130 range: Range<usize>,
4131 ) -> impl Iterator<Item = BracketMatch> + '_ {
4132 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4133 grammar.brackets_config.as_ref().map(|c| &c.query)
4134 });
4135 let configs = matches
4136 .grammars()
4137 .iter()
4138 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4139 .collect::<Vec<_>>();
4140
4141 iter::from_fn(move || {
4142 while let Some(mat) = matches.peek() {
4143 let mut open = None;
4144 let mut close = None;
4145 let config = &configs[mat.grammar_index];
4146 let pattern = &config.patterns[mat.pattern_index];
4147 for capture in mat.captures {
4148 if capture.index == config.open_capture_ix {
4149 open = Some(capture.node.byte_range());
4150 } else if capture.index == config.close_capture_ix {
4151 close = Some(capture.node.byte_range());
4152 }
4153 }
4154
4155 matches.advance();
4156
4157 let Some((open_range, close_range)) = open.zip(close) else {
4158 continue;
4159 };
4160
4161 let bracket_range = open_range.start..=close_range.end;
4162 if !bracket_range.overlaps(&range) {
4163 continue;
4164 }
4165
4166 return Some(BracketMatch {
4167 open_range,
4168 close_range,
4169 newline_only: pattern.newline_only,
4170 });
4171 }
4172 None
4173 })
4174 }
4175
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4177 pub fn bracket_ranges<T: ToOffset>(
4178 &self,
4179 range: Range<T>,
4180 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the search range so that bracket pairs which merely touch its endpoints
        // (i.e. contain the given range *inclusively*) are also found.
4182 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4183 self.all_bracket_ranges(range)
4184 .filter(|pair| !pair.newline_only)
4185 }
4186
4187 pub fn debug_variables_query<T: ToOffset>(
4188 &self,
4189 range: Range<T>,
4190 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4191 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4192
4193 let mut matches = self.syntax.matches_with_options(
4194 range.clone(),
4195 &self.text,
4196 TreeSitterOptions::default(),
4197 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4198 );
4199
4200 let configs = matches
4201 .grammars()
4202 .iter()
4203 .map(|grammar| grammar.debug_variables_config.as_ref())
4204 .collect::<Vec<_>>();
4205
4206 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4207
4208 iter::from_fn(move || {
4209 loop {
4210 while let Some(capture) = captures.pop() {
4211 if capture.0.overlaps(&range) {
4212 return Some(capture);
4213 }
4214 }
4215
4216 let mat = matches.peek()?;
4217
4218 let Some(config) = configs[mat.grammar_index].as_ref() else {
4219 matches.advance();
4220 continue;
4221 };
4222
4223 for capture in mat.captures {
4224 let Some(ix) = config
4225 .objects_by_capture_ix
4226 .binary_search_by_key(&capture.index, |e| e.0)
4227 .ok()
4228 else {
4229 continue;
4230 };
4231 let text_object = config.objects_by_capture_ix[ix].1;
4232 let byte_range = capture.node.byte_range();
4233
4234 let mut found = false;
4235 for (range, existing) in captures.iter_mut() {
4236 if existing == &text_object {
4237 range.start = range.start.min(byte_range.start);
4238 range.end = range.end.max(byte_range.end);
4239 found = true;
4240 break;
4241 }
4242 }
4243
4244 if !found {
4245 captures.push((byte_range, text_object));
4246 }
4247 }
4248
4249 matches.advance();
4250 }
4251 })
4252 }
4253
4254 pub fn text_object_ranges<T: ToOffset>(
4255 &self,
4256 range: Range<T>,
4257 options: TreeSitterOptions,
4258 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4259 let range =
4260 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4261
4262 let mut matches =
4263 self.syntax
4264 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4265 grammar.text_object_config.as_ref().map(|c| &c.query)
4266 });
4267
4268 let configs = matches
4269 .grammars()
4270 .iter()
4271 .map(|grammar| grammar.text_object_config.as_ref())
4272 .collect::<Vec<_>>();
4273
4274 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4275
4276 iter::from_fn(move || {
4277 loop {
4278 while let Some(capture) = captures.pop() {
4279 if capture.0.overlaps(&range) {
4280 return Some(capture);
4281 }
4282 }
4283
4284 let mat = matches.peek()?;
4285
4286 let Some(config) = configs[mat.grammar_index].as_ref() else {
4287 matches.advance();
4288 continue;
4289 };
4290
4291 for capture in mat.captures {
4292 let Some(ix) = config
4293 .text_objects_by_capture_ix
4294 .binary_search_by_key(&capture.index, |e| e.0)
4295 .ok()
4296 else {
4297 continue;
4298 };
4299 let text_object = config.text_objects_by_capture_ix[ix].1;
4300 let byte_range = capture.node.byte_range();
4301
4302 let mut found = false;
4303 for (range, existing) in captures.iter_mut() {
4304 if existing == &text_object {
4305 range.start = range.start.min(byte_range.start);
4306 range.end = range.end.max(byte_range.end);
4307 found = true;
4308 break;
4309 }
4310 }
4311
4312 if !found {
4313 captures.push((byte_range, text_object));
4314 }
4315 }
4316
4317 matches.advance();
4318 }
4319 })
4320 }
4321
    /// Returns enclosing bracket ranges containing the given range.
4323 pub fn enclosing_bracket_ranges<T: ToOffset>(
4324 &self,
4325 range: Range<T>,
4326 ) -> impl Iterator<Item = BracketMatch> + '_ {
4327 let range = range.start.to_offset(self)..range.end.to_offset(self);
4328
4329 self.bracket_ranges(range.clone()).filter(move |pair| {
4330 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4331 })
4332 }
4333
    /// Returns the ranges of the smallest bracket pair enclosing the given range, or `None`
    /// if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
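    ///
    /// A minimal sketch (assuming `snapshot` and a cursor `offset` are in scope; not a doctest):
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
    /// }
    /// ```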
4337 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4338 &self,
4339 range: Range<T>,
4340 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4341 ) -> Option<(Range<usize>, Range<usize>)> {
4342 let range = range.start.to_offset(self)..range.end.to_offset(self);
4343
4344 // Get the ranges of the innermost pair of brackets.
4345 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4346
4347 for pair in self.enclosing_bracket_ranges(range) {
4348 if let Some(range_filter) = range_filter
4349 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4350 {
4351 continue;
4352 }
4353
4354 let len = pair.close_range.end - pair.open_range.start;
4355
4356 if let Some((existing_open, existing_close)) = &result {
4357 let existing_len = existing_close.end - existing_open.start;
4358 if len > existing_len {
4359 continue;
4360 }
4361 }
4362
4363 result = Some((pair.open_range, pair.close_range));
4364 }
4365
4366 result
4367 }
4368
4369 /// Returns anchor ranges for any matches of the redaction query.
4370 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4371 /// will be run on the relevant section of the buffer.
4372 pub fn redacted_ranges<T: ToOffset>(
4373 &self,
4374 range: Range<T>,
4375 ) -> impl Iterator<Item = Range<usize>> + '_ {
4376 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4377 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4378 grammar
4379 .redactions_config
4380 .as_ref()
4381 .map(|config| &config.query)
4382 });
4383
4384 let configs = syntax_matches
4385 .grammars()
4386 .iter()
4387 .map(|grammar| grammar.redactions_config.as_ref())
4388 .collect::<Vec<_>>();
4389
4390 iter::from_fn(move || {
4391 let redacted_range = syntax_matches
4392 .peek()
4393 .and_then(|mat| {
4394 configs[mat.grammar_index].and_then(|config| {
4395 mat.captures
4396 .iter()
4397 .find(|capture| capture.index == config.redaction_capture_ix)
4398 })
4399 })
4400 .map(|mat| mat.node.byte_range());
4401 syntax_matches.advance();
4402 redacted_range
4403 })
4404 }
4405
4406 pub fn injections_intersecting_range<T: ToOffset>(
4407 &self,
4408 range: Range<T>,
4409 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4410 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4411
4412 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4413 grammar
4414 .injection_config
4415 .as_ref()
4416 .map(|config| &config.query)
4417 });
4418
4419 let configs = syntax_matches
4420 .grammars()
4421 .iter()
4422 .map(|grammar| grammar.injection_config.as_ref())
4423 .collect::<Vec<_>>();
4424
4425 iter::from_fn(move || {
4426 let ranges = syntax_matches.peek().and_then(|mat| {
4427 let config = &configs[mat.grammar_index]?;
4428 let content_capture_range = mat.captures.iter().find_map(|capture| {
4429 if capture.index == config.content_capture_ix {
4430 Some(capture.node.byte_range())
4431 } else {
4432 None
4433 }
4434 })?;
4435 let language = self.language_at(content_capture_range.start)?;
4436 Some((content_capture_range, language))
4437 });
4438 syntax_matches.advance();
4439 ranges
4440 })
4441 }
4442
4443 pub fn runnable_ranges(
4444 &self,
4445 offset_range: Range<usize>,
4446 ) -> impl Iterator<Item = RunnableRange> + '_ {
4447 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4448 grammar.runnable_config.as_ref().map(|config| &config.query)
4449 });
4450
4451 let test_configs = syntax_matches
4452 .grammars()
4453 .iter()
4454 .map(|grammar| grammar.runnable_config.as_ref())
4455 .collect::<Vec<_>>();
4456
4457 iter::from_fn(move || {
4458 loop {
4459 let mat = syntax_matches.peek()?;
4460
4461 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4462 let mut run_range = None;
4463 let full_range = mat.captures.iter().fold(
4464 Range {
4465 start: usize::MAX,
4466 end: 0,
4467 },
4468 |mut acc, next| {
4469 let byte_range = next.node.byte_range();
4470 if acc.start > byte_range.start {
4471 acc.start = byte_range.start;
4472 }
4473 if acc.end < byte_range.end {
4474 acc.end = byte_range.end;
4475 }
4476 acc
4477 },
4478 );
4479 if full_range.start > full_range.end {
4480 // We did not find a full spanning range of this match.
4481 return None;
4482 }
4483 let extra_captures: SmallVec<[_; 1]> =
4484 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4485 test_configs
4486 .extra_captures
4487 .get(capture.index as usize)
4488 .cloned()
4489 .and_then(|tag_name| match tag_name {
4490 RunnableCapture::Named(name) => {
4491 Some((capture.node.byte_range(), name))
4492 }
4493 RunnableCapture::Run => {
4494 let _ = run_range.insert(capture.node.byte_range());
4495 None
4496 }
4497 })
4498 }));
4499 let run_range = run_range?;
4500 let tags = test_configs
4501 .query
4502 .property_settings(mat.pattern_index)
4503 .iter()
4504 .filter_map(|property| {
4505 if *property.key == *"tag" {
4506 property
4507 .value
4508 .as_ref()
4509 .map(|value| RunnableTag(value.to_string().into()))
4510 } else {
4511 None
4512 }
4513 })
4514 .collect();
4515 let extra_captures = extra_captures
4516 .into_iter()
4517 .map(|(range, name)| {
4518 (
4519 name.to_string(),
4520 self.text_for_range(range).collect::<String>(),
4521 )
4522 })
4523 .collect();
4524 // All tags should have the same range.
4525 Some(RunnableRange {
4526 run_range,
4527 full_range,
4528 runnable: Runnable {
4529 tags,
4530 language: mat.language,
4531 buffer: self.remote_id(),
4532 },
4533 extra_captures,
4534 buffer_id: self.remote_id(),
4535 })
4536 });
4537
4538 syntax_matches.advance();
4539 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()` returns `None`. We just don't want
                    // to end this iterator because a match lacked a run marker, so in that case we
                    // loop around and try the next match.
4542 return test_range;
4543 }
4544 }
4545 })
4546 }
4547
    /// Returns each replica's selections that intersect the given range. The local
    /// replica's selections are included only when `include_local` is `true`.
4549 #[allow(clippy::type_complexity)]
4550 pub fn selections_in_range(
4551 &self,
4552 range: Range<Anchor>,
4553 include_local: bool,
4554 ) -> impl Iterator<
4555 Item = (
4556 ReplicaId,
4557 bool,
4558 CursorShape,
4559 impl Iterator<Item = &Selection<Anchor>> + '_,
4560 ),
4561 > + '_ {
4562 self.remote_selections
4563 .iter()
4564 .filter(move |(replica_id, set)| {
4565 (include_local || **replica_id != self.text.replica_id())
4566 && !set.selections.is_empty()
4567 })
4568 .map(move |(replica_id, set)| {
4569 let start_ix = match set.selections.binary_search_by(|probe| {
4570 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4571 }) {
4572 Ok(ix) | Err(ix) => ix,
4573 };
4574 let end_ix = match set.selections.binary_search_by(|probe| {
4575 probe.start.cmp(&range.end, self).then(Ordering::Less)
4576 }) {
4577 Ok(ix) | Err(ix) => ix,
4578 };
4579
4580 (
4581 *replica_id,
4582 set.line_mode,
4583 set.cursor_shape,
4584 set.selections[start_ix..end_ix].iter(),
4585 )
4586 })
4587 }
4588
    /// Returns whether the buffer contains any diagnostics.
4590 pub fn has_diagnostics(&self) -> bool {
4591 !self.diagnostics.is_empty()
4592 }
4593
4594 /// Returns all the diagnostics intersecting the given range.
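    ///
    /// A minimal sketch (assuming `snapshot` is in scope; not a doctest):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```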
4595 pub fn diagnostics_in_range<'a, T, O>(
4596 &'a self,
4597 search_range: Range<T>,
4598 reversed: bool,
4599 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4600 where
4601 T: 'a + Clone + ToOffset,
4602 O: 'a + FromAnchor,
4603 {
4604 let mut iterators: Vec<_> = self
4605 .diagnostics
4606 .iter()
4607 .map(|(_, collection)| {
4608 collection
4609 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4610 .peekable()
4611 })
4612 .collect();
4613
4614 std::iter::from_fn(move || {
4615 let (next_ix, _) = iterators
4616 .iter_mut()
4617 .enumerate()
4618 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4619 .min_by(|(_, a), (_, b)| {
4620 let cmp = a
4621 .range
4622 .start
4623 .cmp(&b.range.start, self)
4624 // when range is equal, sort by diagnostic severity
4625 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4626 // and stabilize order with group_id
4627 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4628 if reversed { cmp.reverse() } else { cmp }
4629 })?;
4630 iterators[next_ix]
4631 .next()
4632 .map(
4633 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4634 diagnostic,
4635 range: FromAnchor::from_anchor(&range.start, self)
4636 ..FromAnchor::from_anchor(&range.end, self),
4637 },
4638 )
4639 })
4640 }
4641
4642 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4643 /// should be used instead.
4644 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4645 &self.diagnostics
4646 }
4647
4648 /// Returns all the diagnostic groups associated with the given
4649 /// language server ID. If no language server ID is provided,
4650 /// all diagnostics groups are returned.
4651 pub fn diagnostic_groups(
4652 &self,
4653 language_server_id: Option<LanguageServerId>,
4654 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4655 let mut groups = Vec::new();
4656
4657 if let Some(language_server_id) = language_server_id {
4658 if let Ok(ix) = self
4659 .diagnostics
4660 .binary_search_by_key(&language_server_id, |e| e.0)
4661 {
4662 self.diagnostics[ix]
4663 .1
4664 .groups(language_server_id, &mut groups, self);
4665 }
4666 } else {
4667 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4668 diagnostics.groups(*language_server_id, &mut groups, self);
4669 }
4670 }
4671
4672 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4673 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4674 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4675 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4676 });
4677
4678 groups
4679 }
4680
4681 /// Returns an iterator over the diagnostics for the given group.
4682 pub fn diagnostic_group<O>(
4683 &self,
4684 group_id: usize,
4685 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4686 where
4687 O: FromAnchor + 'static,
4688 {
4689 self.diagnostics
4690 .iter()
4691 .flat_map(move |(_, set)| set.group(group_id, self))
4692 }
4693
4694 /// An integer version number that accounts for all updates besides
4695 /// the buffer's text itself (which is versioned via a version vector).
4696 pub fn non_text_state_update_count(&self) -> usize {
4697 self.non_text_state_update_count
4698 }
4699
4700 /// An integer version that changes when the buffer's syntax changes.
4701 pub fn syntax_update_count(&self) -> usize {
4702 self.syntax.update_count()
4703 }
4704
    /// Returns a snapshot of the underlying file.
4706 pub fn file(&self) -> Option<&Arc<dyn File>> {
4707 self.file.as_ref()
4708 }
4709
4710 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4711 if let Some(file) = self.file() {
4712 if file.path().file_name().is_none() || include_root {
4713 Some(file.full_path(cx).to_string_lossy().into_owned())
4714 } else {
4715 Some(file.path().display(file.path_style(cx)).to_string())
4716 }
4717 } else {
4718 None
4719 }
4720 }
4721
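    /// Returns the distinct words in the given range, keyed by their text and mapped to
    /// their anchor ranges, optionally filtered by a fuzzy query (see [`WordsQuery`]).
    ///
    /// A minimal sketch (assuming `snapshot` is in scope; not a doctest):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```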
4722 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4723 let query_str = query.fuzzy_contents;
4724 if query_str.is_some_and(|query| query.is_empty()) {
4725 return BTreeMap::default();
4726 }
4727
4728 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4729 language,
4730 override_id: None,
4731 }));
4732
4733 let mut query_ix = 0;
4734 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4735 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4736
4737 let mut words = BTreeMap::default();
4738 let mut current_word_start_ix = None;
4739 let mut chunk_ix = query.range.start;
4740 for chunk in self.chunks(query.range, false) {
4741 for (i, c) in chunk.text.char_indices() {
4742 let ix = chunk_ix + i;
4743 if classifier.is_word(c) {
4744 if current_word_start_ix.is_none() {
4745 current_word_start_ix = Some(ix);
4746 }
4747
4748 if let Some(query_chars) = &query_chars
4749 && query_ix < query_len
4750 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4751 {
4752 query_ix += 1;
4753 }
4754 continue;
4755 } else if let Some(word_start) = current_word_start_ix.take()
4756 && query_ix == query_len
4757 {
4758 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4759 let mut word_text = self.text_for_range(word_start..ix).peekable();
4760 let first_char = word_text
4761 .peek()
4762 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start
                    // with a digit when `skip_digits` is set.
4764 if !query.skip_digits
4765 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4766 {
4767 words.insert(word_text.collect(), word_range);
4768 }
4769 }
4770 query_ix = 0;
4771 }
4772 chunk_ix += chunk.text.len();
4773 }
4774
4775 words
4776 }
4777}
4778
4779pub struct WordsQuery<'a> {
    /// Only returns words containing every character of this string, in order
    /// (a case-insensitive subsequence match).
4781 pub fuzzy_contents: Option<&'a str>,
4782 /// Skips words that start with a digit.
4783 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4785 pub range: Range<usize>,
4786}
4787
4788fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4789 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4790}
4791
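/// Returns the indentation (the run of leading spaces or tabs) of the given character stream;
/// the kind is taken from the first indentation character encountered.
///
/// A couple of illustrative cases (not compiled as doctests):
///
/// ```ignore
/// let four_spaces = indent_size_for_text("    let x = 1;".chars()); // len == 4, kind == Space
/// let one_tab = indent_size_for_text("\tfoo".chars()); // len == 1, kind == Tab
/// ```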
4792fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4793 let mut result = IndentSize::spaces(0);
4794 for c in text {
4795 let kind = match c {
4796 ' ' => IndentKind::Space,
4797 '\t' => IndentKind::Tab,
4798 _ => break,
4799 };
4800 if result.len == 0 {
4801 result.kind = kind;
4802 }
4803 result.len += 1;
4804 }
4805 result
4806}
4807
4808impl Clone for BufferSnapshot {
4809 fn clone(&self) -> Self {
4810 Self {
4811 text: self.text.clone(),
4812 syntax: self.syntax.clone(),
4813 file: self.file.clone(),
4814 remote_selections: self.remote_selections.clone(),
4815 diagnostics: self.diagnostics.clone(),
4816 language: self.language.clone(),
4817 non_text_state_update_count: self.non_text_state_update_count,
4818 }
4819 }
4820}
4821
4822impl Deref for BufferSnapshot {
4823 type Target = text::BufferSnapshot;
4824
4825 fn deref(&self) -> &Self::Target {
4826 &self.text
4827 }
4828}
4829
4830unsafe impl Send for BufferChunks<'_> {}
4831
4832impl<'a> BufferChunks<'a> {
4833 pub(crate) fn new(
4834 text: &'a Rope,
4835 range: Range<usize>,
4836 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4837 diagnostics: bool,
4838 buffer_snapshot: Option<&'a BufferSnapshot>,
4839 ) -> Self {
4840 let mut highlights = None;
4841 if let Some((captures, highlight_maps)) = syntax {
4842 highlights = Some(BufferChunkHighlights {
4843 captures,
4844 next_capture: None,
4845 stack: Default::default(),
4846 highlight_maps,
4847 })
4848 }
4849
4850 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4851 let chunks = text.chunks_in_range(range.clone());
4852
4853 let mut this = BufferChunks {
4854 range,
4855 buffer_snapshot,
4856 chunks,
4857 diagnostic_endpoints,
4858 error_depth: 0,
4859 warning_depth: 0,
4860 information_depth: 0,
4861 hint_depth: 0,
4862 unnecessary_depth: 0,
4863 underline: true,
4864 highlights,
4865 };
4866 this.initialize_diagnostic_endpoints();
4867 this
4868 }
4869
    /// Seeks to the given byte range in the buffer.
4871 pub fn seek(&mut self, range: Range<usize>) {
4872 let old_range = std::mem::replace(&mut self.range, range.clone());
4873 self.chunks.set_range(self.range.clone());
4874 if let Some(highlights) = self.highlights.as_mut() {
4875 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4876 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4877 highlights
4878 .stack
4879 .retain(|(end_offset, _)| *end_offset > range.start);
4880 if let Some(capture) = &highlights.next_capture
4881 && range.start >= capture.node.start_byte()
4882 {
4883 let next_capture_end = capture.node.end_byte();
4884 if range.start < next_capture_end {
4885 highlights.stack.push((
4886 next_capture_end,
4887 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4888 ));
4889 }
4890 highlights.next_capture.take();
4891 }
4892 } else if let Some(snapshot) = self.buffer_snapshot {
4893 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4894 *highlights = BufferChunkHighlights {
4895 captures,
4896 next_capture: None,
4897 stack: Default::default(),
4898 highlight_maps,
4899 };
4900 } else {
4901 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4902 // Seeking such BufferChunks is not supported.
4903 debug_assert!(
4904 false,
4905 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4906 );
4907 }
4908
4909 highlights.captures.set_byte_range(self.range.clone());
4910 self.initialize_diagnostic_endpoints();
4911 }
4912 }
4913
4914 fn initialize_diagnostic_endpoints(&mut self) {
4915 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4916 && let Some(buffer) = self.buffer_snapshot
4917 {
4918 let mut diagnostic_endpoints = Vec::new();
4919 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4920 diagnostic_endpoints.push(DiagnosticEndpoint {
4921 offset: entry.range.start,
4922 is_start: true,
4923 severity: entry.diagnostic.severity,
4924 is_unnecessary: entry.diagnostic.is_unnecessary,
4925 underline: entry.diagnostic.underline,
4926 });
4927 diagnostic_endpoints.push(DiagnosticEndpoint {
4928 offset: entry.range.end,
4929 is_start: false,
4930 severity: entry.diagnostic.severity,
4931 is_unnecessary: entry.diagnostic.is_unnecessary,
4932 underline: entry.diagnostic.underline,
4933 });
4934 }
4935 diagnostic_endpoints
4936 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4937 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4938 self.hint_depth = 0;
4939 self.error_depth = 0;
4940 self.warning_depth = 0;
4941 self.information_depth = 0;
4942 }
4943 }
4944
4945 /// The current byte offset in the buffer.
4946 pub fn offset(&self) -> usize {
4947 self.range.start
4948 }
4949
4950 pub fn range(&self) -> Range<usize> {
4951 self.range.clone()
4952 }
4953
4954 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4955 let depth = match endpoint.severity {
4956 DiagnosticSeverity::ERROR => &mut self.error_depth,
4957 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4958 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4959 DiagnosticSeverity::HINT => &mut self.hint_depth,
4960 _ => return,
4961 };
4962 if endpoint.is_start {
4963 *depth += 1;
4964 } else {
4965 *depth -= 1;
4966 }
4967
4968 if endpoint.is_unnecessary {
4969 if endpoint.is_start {
4970 self.unnecessary_depth += 1;
4971 } else {
4972 self.unnecessary_depth -= 1;
4973 }
4974 }
4975 }
4976
4977 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4978 if self.error_depth > 0 {
4979 Some(DiagnosticSeverity::ERROR)
4980 } else if self.warning_depth > 0 {
4981 Some(DiagnosticSeverity::WARNING)
4982 } else if self.information_depth > 0 {
4983 Some(DiagnosticSeverity::INFORMATION)
4984 } else if self.hint_depth > 0 {
4985 Some(DiagnosticSeverity::HINT)
4986 } else {
4987 None
4988 }
4989 }
4990
4991 fn current_code_is_unnecessary(&self) -> bool {
4992 self.unnecessary_depth > 0
4993 }
4994}
4995
4996impl<'a> Iterator for BufferChunks<'a> {
4997 type Item = Chunk<'a>;
4998
4999 fn next(&mut self) -> Option<Self::Item> {
5000 let mut next_capture_start = usize::MAX;
5001 let mut next_diagnostic_endpoint = usize::MAX;
5002
5003 if let Some(highlights) = self.highlights.as_mut() {
5004 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5005 if *parent_capture_end <= self.range.start {
5006 highlights.stack.pop();
5007 } else {
5008 break;
5009 }
5010 }
5011
5012 if highlights.next_capture.is_none() {
5013 highlights.next_capture = highlights.captures.next();
5014 }
5015
5016 while let Some(capture) = highlights.next_capture.as_ref() {
5017 if self.range.start < capture.node.start_byte() {
5018 next_capture_start = capture.node.start_byte();
5019 break;
5020 } else {
5021 let highlight_id =
5022 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5023 highlights
5024 .stack
5025 .push((capture.node.end_byte(), highlight_id));
5026 highlights.next_capture = highlights.captures.next();
5027 }
5028 }
5029 }
5030
5031 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5032 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5033 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5034 if endpoint.offset <= self.range.start {
5035 self.update_diagnostic_depths(endpoint);
5036 diagnostic_endpoints.next();
5037 self.underline = endpoint.underline;
5038 } else {
5039 next_diagnostic_endpoint = endpoint.offset;
5040 break;
5041 }
5042 }
5043 }
5044 self.diagnostic_endpoints = diagnostic_endpoints;
5045
5046 if let Some(ChunkBitmaps {
5047 text: chunk,
5048 chars: chars_map,
5049 tabs,
5050 }) = self.chunks.peek_with_bitmaps()
5051 {
5052 let chunk_start = self.range.start;
5053 let mut chunk_end = (self.chunks.offset() + chunk.len())
5054 .min(next_capture_start)
5055 .min(next_diagnostic_endpoint);
5056 let mut highlight_id = None;
5057 if let Some(highlights) = self.highlights.as_ref()
5058 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5059 {
5060 chunk_end = chunk_end.min(*parent_capture_end);
5061 highlight_id = Some(*parent_highlight_id);
5062 }
5063 let bit_start = chunk_start - self.chunks.offset();
5064 let bit_end = chunk_end - self.chunks.offset();
5065
5066 let slice = &chunk[bit_start..bit_end];
5067
5068 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5069 let tabs = (tabs >> bit_start) & mask;
5070 let chars = (chars_map >> bit_start) & mask;
5071
5072 self.range.start = chunk_end;
5073 if self.range.start == self.chunks.offset() + chunk.len() {
5074 self.chunks.next().unwrap();
5075 }
5076
5077 Some(Chunk {
5078 text: slice,
5079 syntax_highlight_id: highlight_id,
5080 underline: self.underline,
5081 diagnostic_severity: self.current_diagnostic_severity(),
5082 is_unnecessary: self.current_code_is_unnecessary(),
5083 tabs,
5084 chars,
5085 ..Chunk::default()
5086 })
5087 } else {
5088 None
5089 }
5090 }
5091}
5092
5093impl operation_queue::Operation for Operation {
5094 fn lamport_timestamp(&self) -> clock::Lamport {
5095 match self {
5096 Operation::Buffer(_) => {
5097 unreachable!("buffer operations should never be deferred at this layer")
5098 }
5099 Operation::UpdateDiagnostics {
5100 lamport_timestamp, ..
5101 }
5102 | Operation::UpdateSelections {
5103 lamport_timestamp, ..
5104 }
5105 | Operation::UpdateCompletionTriggers {
5106 lamport_timestamp, ..
5107 }
5108 | Operation::UpdateLineEnding {
5109 lamport_timestamp, ..
5110 } => *lamport_timestamp,
5111 }
5112 }
5113}
5114
5115impl Default for Diagnostic {
5116 fn default() -> Self {
5117 Self {
5118 source: Default::default(),
5119 source_kind: DiagnosticSourceKind::Other,
5120 code: None,
5121 code_description: None,
5122 severity: DiagnosticSeverity::ERROR,
5123 message: Default::default(),
5124 markdown: None,
5125 group_id: 0,
5126 is_primary: false,
5127 is_disk_based: false,
5128 is_unnecessary: false,
5129 underline: true,
5130 data: None,
5131 }
5132 }
5133}
5134
5135impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5137 pub fn spaces(len: u32) -> Self {
5138 Self {
5139 len,
5140 kind: IndentKind::Space,
5141 }
5142 }
5143
5144 /// Returns an [`IndentSize`] representing a tab.
5145 pub fn tab() -> Self {
5146 Self {
5147 len: 1,
5148 kind: IndentKind::Tab,
5149 }
5150 }
5151
5152 /// An iterator over the characters represented by this [`IndentSize`].
5153 pub fn chars(&self) -> impl Iterator<Item = char> {
5154 iter::repeat(self.char()).take(self.len as usize)
5155 }
5156
5157 /// The character representation of this [`IndentSize`].
5158 pub fn char(&self) -> char {
5159 match self.kind {
5160 IndentKind::Space => ' ',
5161 IndentKind::Tab => '\t',
5162 }
5163 }
5164
5165 /// Consumes the current [`IndentSize`] and returns a new one that has
5166 /// been shrunk or enlarged by the given size along the given direction.
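    ///
    /// A few illustrative cases (not compiled as doctests):
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)); // len == 8
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)); // len == 2
    /// let unchanged = IndentSize::tab().with_delta(Ordering::Greater, IndentSize::spaces(2)); // kinds differ, len stays 1
    /// ```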
5167 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5168 match direction {
5169 Ordering::Less => {
5170 if self.kind == size.kind && self.len >= size.len {
5171 self.len -= size.len;
5172 }
5173 }
5174 Ordering::Equal => {}
5175 Ordering::Greater => {
5176 if self.len == 0 {
5177 self = size;
5178 } else if self.kind == size.kind {
5179 self.len += size.len;
5180 }
5181 }
5182 }
5183 self
5184 }
5185
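    /// Returns the length of this indent with tabs expanded, counting each tab as
    /// `tab_size` columns.
    ///
    /// For example (not compiled as a doctest):
    ///
    /// ```ignore
    /// let width = IndentSize::tab().len_with_expanded_tabs(NonZeroU32::new(4).unwrap()); // 4
    /// ```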
5186 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5187 match self.kind {
5188 IndentKind::Space => self.len as usize,
5189 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5190 }
5191 }
5192}
5193
5194#[cfg(any(test, feature = "test-support"))]
5195pub struct TestFile {
5196 pub path: Arc<RelPath>,
5197 pub root_name: String,
5198 pub local_root: Option<PathBuf>,
5199}
5200
5201#[cfg(any(test, feature = "test-support"))]
5202impl File for TestFile {
5203 fn path(&self) -> &Arc<RelPath> {
5204 &self.path
5205 }
5206
5207 fn full_path(&self, _: &gpui::App) -> PathBuf {
5208 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5209 }
5210
5211 fn as_local(&self) -> Option<&dyn LocalFile> {
5212 if self.local_root.is_some() {
5213 Some(self)
5214 } else {
5215 None
5216 }
5217 }
5218
5219 fn disk_state(&self) -> DiskState {
5220 unimplemented!()
5221 }
5222
5223 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5224 self.path().file_name().unwrap_or(self.root_name.as_ref())
5225 }
5226
5227 fn worktree_id(&self, _: &App) -> WorktreeId {
5228 WorktreeId::from_usize(0)
5229 }
5230
5231 fn to_proto(&self, _: &App) -> rpc::proto::File {
5232 unimplemented!()
5233 }
5234
5235 fn is_private(&self) -> bool {
5236 false
5237 }
5238
5239 fn path_style(&self, _cx: &App) -> PathStyle {
5240 PathStyle::local()
5241 }
5242}
5243
5244#[cfg(any(test, feature = "test-support"))]
5245impl LocalFile for TestFile {
5246 fn abs_path(&self, _cx: &App) -> PathBuf {
5247 PathBuf::from(self.local_root.as_ref().unwrap())
5248 .join(&self.root_name)
5249 .join(self.path.as_std_path())
5250 }
5251
5252 fn load(
5253 &self,
5254 _cx: &App,
5255 _encoding: EncodingWrapper,
5256 _force: bool,
5257 _detect_utf16: bool,
5258 _buffer_encoding: Option<Arc<std::sync::Mutex<&'static Encoding>>>,
5259 ) -> Task<Result<String>> {
5260 unimplemented!()
5261 }
5262
5263 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5264 unimplemented!()
5265 }
5266}
5267
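/// Groups a sequence of row values into contiguous ranges: consecutive values extend the
/// current range, and no range grows longer than `max_len`.
///
/// An illustrative case (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5].into_iter(), 100).collect();
/// // ranges == [1..4, 5..6]
/// ```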
5268pub(crate) fn contiguous_ranges(
5269 values: impl Iterator<Item = u32>,
5270 max_len: usize,
5271) -> impl Iterator<Item = Range<u32>> {
5272 let mut values = values;
5273 let mut current_range: Option<Range<u32>> = None;
5274 std::iter::from_fn(move || {
5275 loop {
5276 if let Some(value) = values.next() {
5277 if let Some(range) = &mut current_range
5278 && value == range.end
5279 && range.len() < max_len
5280 {
5281 range.end += 1;
5282 continue;
5283 }
5284
5285 let prev_range = current_range.clone();
5286 current_range = Some(value..(value + 1));
5287 if prev_range.is_some() {
5288 return prev_range;
5289 }
5290 } else {
5291 return current_range.take();
5292 }
5293 }
5294 })
5295}
5296
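/// Classifies characters as word, whitespace, or punctuation, taking the language scope's
/// word characters (and the optional scope context) into account when a scope is provided.
///
/// A few illustrative cases (not compiled as doctests):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// // With `ignore_punctuation`, punctuation is treated as part of a word.
/// assert!(classifier.ignore_punctuation(true).is_word('-'));
/// ```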
5297#[derive(Default, Debug)]
5298pub struct CharClassifier {
5299 scope: Option<LanguageScope>,
5300 scope_context: Option<CharScopeContext>,
5301 ignore_punctuation: bool,
5302}
5303
5304impl CharClassifier {
5305 pub fn new(scope: Option<LanguageScope>) -> Self {
5306 Self {
5307 scope,
5308 scope_context: None,
5309 ignore_punctuation: false,
5310 }
5311 }
5312
5313 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5314 Self {
5315 scope_context,
5316 ..self
5317 }
5318 }
5319
5320 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5321 Self {
5322 ignore_punctuation,
5323 ..self
5324 }
5325 }
5326
5327 pub fn is_whitespace(&self, c: char) -> bool {
5328 self.kind(c) == CharKind::Whitespace
5329 }
5330
5331 pub fn is_word(&self, c: char) -> bool {
5332 self.kind(c) == CharKind::Word
5333 }
5334
5335 pub fn is_punctuation(&self, c: char) -> bool {
5336 self.kind(c) == CharKind::Punctuation
5337 }
5338
5339 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5340 if c.is_alphanumeric() || c == '_' {
5341 return CharKind::Word;
5342 }
5343
5344 if let Some(scope) = &self.scope {
5345 let characters = match self.scope_context {
5346 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5347 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5348 None => scope.word_characters(),
5349 };
5350 if let Some(characters) = characters
5351 && characters.contains(&c)
5352 {
5353 return CharKind::Word;
5354 }
5355 }
5356
5357 if c.is_whitespace() {
5358 return CharKind::Whitespace;
5359 }
5360
5361 if ignore_punctuation {
5362 CharKind::Word
5363 } else {
5364 CharKind::Punctuation
5365 }
5366 }
5367
5368 pub fn kind(&self, c: char) -> CharKind {
5369 self.kind_with(c, self.ignore_punctuation)
5370 }
5371}
5372
5373/// Find all of the ranges of whitespace that occur at the ends of lines
5374/// in the given rope.
5375///
5376/// This could also be done with a regex search, but this implementation
5377/// avoids copying text.
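///
/// An illustrative case (not compiled as a doctest):
///
/// ```ignore
/// let ranges = trailing_whitespace_ranges(&Rope::from("a  \nb\t\nc"));
/// // ranges == [1..3, 5..6]
/// ```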
5378pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5379 let mut ranges = Vec::new();
5380
5381 let mut offset = 0;
5382 let mut prev_chunk_trailing_whitespace_range = 0..0;
5383 for chunk in rope.chunks() {
5384 let mut prev_line_trailing_whitespace_range = 0..0;
5385 for (i, line) in chunk.split('\n').enumerate() {
5386 let line_end_offset = offset + line.len();
5387 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5388 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5389
5390 if i == 0 && trimmed_line_len == 0 {
5391 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5392 }
5393 if !prev_line_trailing_whitespace_range.is_empty() {
5394 ranges.push(prev_line_trailing_whitespace_range);
5395 }
5396
5397 offset = line_end_offset + 1;
5398 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5399 }
5400
5401 offset -= 1;
5402 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5403 }
5404
5405 if !prev_chunk_trailing_whitespace_range.is_empty() {
5406 ranges.push(prev_chunk_trailing_whitespace_range);
5407 }
5408
5409 ranges
5410}