1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encodings::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
///
/// Exposed publicly so other code (e.g. tests) can identify or wait on
/// this specific task — presumably via the task scheduler; confirm at call sites.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicate whether a [`Buffer`] has permissions to edit.
//
// Improvement: the enum derived `PartialEq` without `Eq` even though equality
// is total for this fieldless enum (clippy::derive_partial_eq_without_eq).
// Adding `Eq` is backward compatible and lets the type be used where full
// equivalence is required.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}
87
/// A zero-based row (line) index within a buffer.
pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    /// The underlying replicated text buffer.
    text: TextBuffer,
    /// Present when this buffer is a branch of another buffer
    /// (see [`BufferBranchState`]).
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    // NOTE(review): presumably the version at which this buffer was last
    // captured for a preview tab — confirm against preview-tab logic.
    preview_version: clock::Global,
    /// Nesting depth of currently-open transactions.
    transaction_depth: usize,
    /// Dirty state captured when the outermost transaction began, used to
    /// detect dirty-state changes when the transaction ends.
    was_dirty_before_starting_transaction: Option<bool>,
    /// In-flight task reloading the buffer's contents from disk, if any.
    reload_task: Option<Task<Result<()>>>,
    /// The language assigned to this buffer, if any.
    language: Option<Arc<Language>>,
    /// Auto-indent requests pending application after edits.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// Channels notified when pending auto-indent work completes.
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    /// In-flight task computing auto-indentation, if any.
    pending_autoindent: Option<Task<()>>,
    /// How long a parse may run synchronously before being moved to the background.
    sync_parse_timeout: Duration,
    /// Incrementally-maintained syntax trees for this buffer.
    syntax_map: Mutex<SyntaxMap>,
    /// In-flight background reparse task, if any.
    reparse: Option<Task<()>>,
    /// Watch channel broadcasting whether a parse is currently in progress.
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    /// Counter bumped on non-text state changes (diagnostics, selections, …).
    non_text_state_update_count: usize,
    /// Diagnostics for this buffer, grouped by the language server that produced them.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// The most recent selections of each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// Lamport timestamp of the latest diagnostics update.
    diagnostics_timestamp: clock::Lamport,
    /// Union of completion trigger characters across language servers.
    completion_triggers: BTreeSet<String>,
    /// Completion trigger characters, per language server.
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    /// Lamport timestamp of the latest completion-triggers update.
    completion_triggers_timestamp: clock::Lamport,
    /// Operations received out of order, queued until they can be applied.
    deferred_ops: OperationQueue<Operation>,
    /// Whether this replica may edit the buffer.
    capability: Capability,
    /// Whether the on-disk contents conflict with unsaved buffer contents.
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of a last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    /// Weak flags set whenever the buffer changes, letting observers
    /// cheaply poll for changes.
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    /// Subscriptions kept alive for the lifetime of the buffer.
    _subscriptions: Vec<gpui::Subscription>,
    /// The character encoding used when loading/saving this buffer's file.
    pub encoding: Arc<Encoding>,
    /// Subscription observing changes to the file's encoding, if any.
    pub observe_file_encoding: Option<gpui::Subscription>,
}
133
/// Whether a reparse of the buffer is currently in progress.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    /// No reparse is in progress.
    Idle,
    /// A reparse is currently running.
    Parsing,
}

/// State linking a branch buffer to the buffer it was branched from.
struct BufferBranchState {
    /// The buffer this branch was created from.
    base_buffer: Entity<Buffer>,
    /// Timestamps of operations that have been merged into the base buffer.
    // NOTE(review): inferred from the field name — confirm merge semantics.
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    /// Snapshot of the underlying text.
    pub text: text::BufferSnapshot,
    /// Snapshot of the syntax trees.
    pub syntax: SyntaxSnapshot,
    /// The buffer's file at the time of the snapshot, if any.
    file: Option<Arc<dyn File>>,
    /// Per-language-server diagnostics at the time of the snapshot.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// Remote replicas' selections at the time of the snapshot.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// The buffer's language at the time of the snapshot.
    language: Option<Arc<Language>>,
    /// Value of the buffer's non-text update counter at snapshot time.
    non_text_state_update_count: usize,
}
156
/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    /// (For ASCII spaces/tabs this equals the character count.)
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character. This is the default.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar. This is the default.
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}
190
191impl From<settings::CursorShape> for CursorShape {
192 fn from(shape: settings::CursorShape) -> Self {
193 match shape {
194 settings::CursorShape::Bar => CursorShape::Bar,
195 settings::CursorShape::Block => CursorShape::Block,
196 settings::CursorShape::Underline => CursorShape::Underline,
197 settings::CursorShape::Hollow => CursorShape::Hollow,
198 }
199 }
200}
201
/// A set of selections belonging to one replica, plus the metadata
/// needed to render them.
#[derive(Clone, Debug)]
struct SelectionSet {
    /// Whether the selections were made in 'line mode'.
    line_mode: bool,
    /// The cursor shape to render for these selections.
    cursor_shape: CursorShape,
    /// The selections, anchored to buffer positions.
    selections: Arc<[Selection<Anchor>]>,
    /// Lamport timestamp of the most recent update to this set.
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    /// A URI pointing to documentation about this diagnostic's code
    /// (mirrors the LSP `codeDescription` field).
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format)
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Quick separation of diagnostics groups based by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// How a diagnostic was delivered to the editor.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    /// Pulled from the server by the client (LSP pull diagnostics).
    Pulled,
    /// Pushed by the server (e.g. `textDocument/publishDiagnostics`).
    Pushed,
    /// Produced by some other source.
    Other,
}
258
/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        /// The operation that must be replicated.
        operation: Operation,
        /// Whether the operation originated on this replica
        /// (as opposed to being applied from a remote peer).
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}
337
/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;

    /// Returns the file's character encoding, if known.
    ///
    /// The default implementation panics via `unimplemented!`, so callers
    /// must only invoke this on implementations that override it.
    fn encoding(&self) -> Option<Arc<Encoding>> {
        unimplemented!()
    }
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}
396
397impl DiskState {
398 /// Returns the file's last known modification time on disk.
399 pub fn mtime(self) -> Option<MTime> {
400 match self {
401 DiskState::New => None,
402 DiskState::Present { mtime } => Some(mtime),
403 DiskState::Deleted => None,
404 }
405 }
406
407 pub fn exists(&self) -> bool {
408 match self {
409 DiskState::New => false,
410 DiskState::Present { .. } => true,
411 DiskState::Deleted => false,
412 }
413 }
414}
415
/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    ///
    /// NOTE(review): the exact semantics of `force` and `detect_utf16`, and the
    /// precedence between `encoding` and `buffer_encoding`, are determined by
    /// the implementor — confirm against the implementation.
    fn load(
        &self,
        cx: &App,
        encoding: Encoding,
        force: bool,
        detect_utf16: bool,
        buffer_encoding: Option<Arc<Encoding>>,
    ) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will be auto-indented to column `b - a`
        original_indent_columns: Vec<Option<u32>>,
    },
}

/// A pending request to recompute indentation for a set of edited ranges.
#[derive(Clone)]
struct AutoindentRequest {
    /// Snapshot of the buffer taken before the triggering edit.
    before_edit: BufferSnapshot,
    /// The ranges whose indentation should be recomputed.
    entries: Vec<AutoindentRequestEntry>,
    /// Whether block-mode (uniform) adjustment should be applied
    /// (see [`AutoindentMode::Block`]).
    is_block_mode: bool,
    /// Whether whitespace-only lines should be left untouched.
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    /// The indentation unit (kind and width) to use when adjusting.
    indent_size: IndentSize,
    /// The indent column the text had where it was copied from, if any
    /// (see [`AutoindentMode::Block`]).
    original_indent_column: Option<u32>,
}

/// A suggested indentation level for a row, expressed relative to a basis row.
#[derive(Debug)]
struct IndentSuggestion {
    /// The row whose indentation this suggestion is relative to.
    basis_row: u32,
    /// Whether to indent less than, equal to, or more than the basis row.
    delta: Ordering,
    /// Whether the suggestion was computed within a syntax-error region.
    // NOTE(review): inferred from the field name — confirm in the code that
    // computes suggestions.
    within_error: bool,
}

/// Internal syntax-highlighting state used by [`BufferChunks`] while iterating.
struct BufferChunkHighlights<'a> {
    /// Captures produced by the syntax map's highlight queries.
    captures: SyntaxMapCaptures<'a>,
    /// The next capture to process, if any.
    next_capture: Option<SyntaxMapCapture<'a>>,
    /// Stack of (end offset, highlight id) for currently-open captures.
    stack: Vec<(usize, HighlightId)>,
    /// One highlight map per grammar present in the captures.
    highlight_maps: Vec<HighlightMap>,
}
493
/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    /// The snapshot being iterated, when diagnostic info is needed.
    buffer_snapshot: Option<&'a BufferSnapshot>,
    /// The byte range of the buffer being iterated.
    range: Range<usize>,
    /// The underlying text chunks.
    chunks: text::Chunks<'a>,
    /// Diagnostic start/end boundaries within `range`, if requested.
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    // The following depth counters track how many diagnostics of each
    // severity currently overlap the iterator's position.
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    /// Whether the current position lies inside a diagnostic that
    /// requests an underline.
    underline: bool,
    /// Syntax-highlighting state, when highlighting was requested.
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    /// The buffer version these edits apply to.
    pub base_version: clock::Global,
    /// The line ending of the new content.
    pub line_ending: LineEnding,
    /// The edits, as (byte range in the base version, replacement text).
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

/// The start or end boundary of a diagnostic, used when slicing a
/// buffer into chunks.
#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    /// Byte offset of this boundary within the buffer.
    offset: usize,
    /// Whether this is the start (`true`) or end (`false`) of the diagnostic.
    is_start: bool,
    /// Whether the diagnostic requests an underline.
    underline: bool,
    /// The diagnostic's severity.
    severity: DiagnosticSeverity,
    /// Whether the diagnostic marks unnecessary code.
    is_unnecessary: bool,
}
553
/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    /// The runnable tags captured in the region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language the region was parsed with.
    pub language: Arc<Language>,
    /// The id of the buffer containing the region.
    pub buffer: BufferId,
}

/// Text together with highlight styles, ready for display.
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    /// The text itself.
    pub text: SharedString,
    /// Highlight styles, each applied to a byte range of `text`.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

/// Mutable accumulator used to construct a [`HighlightedText`].
#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    /// The text accumulated so far.
    pub text: String,
    /// Highlights accumulated so far, keyed by byte range of `text`.
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}
600
impl HighlightedText {
    /// Produce highlighted text for the given buffer range, using the
    /// provided syntax snapshot and theme, optionally merging an override
    /// style into every highlighted chunk.
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    /// Convert this highlighted text into a renderable [`StyledText`]
    /// element using the given default text style.
    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line without leading whitespace unless highlighted
    /// and a boolean indicating if there are more lines after
    pub fn first_line_preview(self) -> (Self, bool) {
        // Offset of the first newline, or the text's length for a single line.
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        // Re-base highlight ranges onto the preview text, dropping any that
        // fall entirely before the preview start or after the first line.
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        // The boolean reports whether any text follows the first line.
        (preview, self.text.len() > newline_ix)
    }
}
661
impl HighlightedTextBuilder {
    /// Finish building, converting the accumulated text and highlights into
    /// an immutable [`HighlightedText`].
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    /// Append the given buffer range's text, recording a highlight for every
    /// chunk that has a syntax style and/or the given override style.
    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            // Record where this chunk lands in the accumulated text.
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                // Merge the override style on top of the syntax style.
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                // No syntax style for this chunk; apply only the override.
                self.highlights.push((start..end, override_style));
            }
        }
    }

    /// Produce syntax-highlighted chunks for the given byte range, using the
    /// highlight queries of every grammar present in the range.
    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}
725
/// Precomputed snapshots used to render a preview of a set of edits
/// before they are applied to a buffer.
#[derive(Clone)]
pub struct EditPreview {
    /// Snapshot of the buffer before the edits.
    old_snapshot: text::BufferSnapshot,
    /// Snapshot of the buffer with the edits applied.
    applied_edits_snapshot: text::BufferSnapshot,
    /// Syntax snapshot matching `applied_edits_snapshot`.
    syntax_snapshot: SyntaxSnapshot,
}
732
733impl EditPreview {
734 pub fn highlight_edits(
735 &self,
736 current_snapshot: &BufferSnapshot,
737 edits: &[(Range<Anchor>, String)],
738 include_deletions: bool,
739 cx: &App,
740 ) -> HighlightedText {
741 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
742 return HighlightedText::default();
743 };
744
745 let mut highlighted_text = HighlightedTextBuilder::default();
746
747 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
748
749 let insertion_highlight_style = HighlightStyle {
750 background_color: Some(cx.theme().status().created_background),
751 ..Default::default()
752 };
753 let deletion_highlight_style = HighlightStyle {
754 background_color: Some(cx.theme().status().deleted_background),
755 ..Default::default()
756 };
757 let syntax_theme = cx.theme().syntax();
758
759 for (range, edit_text) in edits {
760 let edit_new_end_in_preview_snapshot = range
761 .end
762 .bias_right(&self.old_snapshot)
763 .to_offset(&self.applied_edits_snapshot);
764 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
765
766 let unchanged_range_in_preview_snapshot =
767 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
768 if !unchanged_range_in_preview_snapshot.is_empty() {
769 highlighted_text.add_text_from_buffer_range(
770 unchanged_range_in_preview_snapshot,
771 &self.applied_edits_snapshot,
772 &self.syntax_snapshot,
773 None,
774 syntax_theme,
775 );
776 }
777
778 let range_in_current_snapshot = range.to_offset(current_snapshot);
779 if include_deletions && !range_in_current_snapshot.is_empty() {
780 highlighted_text.add_text_from_buffer_range(
781 range_in_current_snapshot,
782 ¤t_snapshot.text,
783 ¤t_snapshot.syntax,
784 Some(deletion_highlight_style),
785 syntax_theme,
786 );
787 }
788
789 if !edit_text.is_empty() {
790 highlighted_text.add_text_from_buffer_range(
791 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
792 &self.applied_edits_snapshot,
793 &self.syntax_snapshot,
794 Some(insertion_highlight_style),
795 syntax_theme,
796 );
797 }
798
799 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
800 }
801
802 highlighted_text.add_text_from_buffer_range(
803 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
804 &self.applied_edits_snapshot,
805 &self.syntax_snapshot,
806 None,
807 syntax_theme,
808 );
809
810 highlighted_text.build()
811 }
812
813 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
814 let (first, _) = edits.first()?;
815 let (last, _) = edits.last()?;
816
817 let start = first
818 .start
819 .bias_left(&self.old_snapshot)
820 .to_point(&self.applied_edits_snapshot);
821 let end = last
822 .end
823 .bias_right(&self.old_snapshot)
824 .to_point(&self.applied_edits_snapshot);
825
826 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
827 let range = Point::new(start.row, 0)
828 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
829
830 Some(range.to_offset(&self.applied_edits_snapshot))
831 }
832}
833
/// A pair of matching bracket ranges within a buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    /// Byte range of the opening bracket.
    pub open_range: Range<usize>,
    /// Byte range of the closing bracket.
    pub close_range: Range<usize>,
    /// Whether this pair should only apply to newline insertions.
    // NOTE(review): inferred from the field name — confirm against the
    // bracket-matching configuration that populates it.
    pub newline_only: bool,
}
840
841impl Buffer {
    /// Create a new buffer with the given base text.
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                // Derive the buffer id from the entity id, which is unique.
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
                &cx.background_executor(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Replace the text buffer. This function is in contrast to `set_text` in that it does not
    /// change the buffer's editing state
    pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
        self.text = new;
        // Treat the new contents as saved so the buffer is not considered dirty.
        self.saved_version = self.version.clone();
        self.has_unsaved_edits.set((self.version.clone(), false));

        self.was_changed();
        cx.emit(BufferEvent::DirtyChanged);
        cx.notify();
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
        cx: &BackgroundExecutor,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    ///
    /// Returns an error when the message's buffer id or line ending cannot
    /// be deserialized.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
        cx: &BackgroundExecutor,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        // Restore save-related state so dirtiness is computed correctly.
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }
932
    /// Serialize as protobufs all of the changes to the buffer since the given version.
    ///
    /// Non-text state (deferred ops, remote selections, diagnostics, and
    /// completion triggers) is serialized synchronously here; the text
    /// operations are filtered and serialized on a background task. The
    /// combined list is sorted by lamport timestamp so that peers can apply
    /// the operations deterministically.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            // `None` means "everything": an empty version vector has observed nothing,
            // so no operation is filtered out.
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }
982
    /// Assign a language to the buffer, returning the buffer.
    ///
    /// Builder-style convenience wrapper around [`Self::set_language`].
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }
988
    /// Returns the [`Capability`] of this buffer (read-only vs. read-write).
    pub fn capability(&self) -> Capability {
        self.capability
    }
993
994 /// Whether this buffer can only be read.
995 pub fn read_only(&self) -> bool {
996 self.capability == Capability::ReadOnly
997 }
998
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        // A buffer without a file (or one whose file isn't on disk) has no mtime.
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            // The buffer starts out with no unsaved edits relative to its
            // initial version.
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            // How long `reparse` blocks the current thread before falling back
            // to an asynchronous background parse.
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
            // Default to UTF-8 until another encoding is detected or assigned.
            encoding: Arc::new(Encoding::new(encodings::UTF_8)),
            observe_file_encoding: None,
        }
    }
1039
1040 pub fn build_snapshot(
1041 text: Rope,
1042 language: Option<Arc<Language>>,
1043 language_registry: Option<Arc<LanguageRegistry>>,
1044 cx: &mut App,
1045 ) -> impl Future<Output = BufferSnapshot> + use<> {
1046 let entity_id = cx.reserve_entity::<Self>().entity_id();
1047 let buffer_id = entity_id.as_non_zero_u64().into();
1048 async move {
1049 let text =
1050 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1051 .snapshot();
1052 let mut syntax = SyntaxMap::new(&text).snapshot();
1053 if let Some(language) = language.clone() {
1054 let language_registry = language_registry.clone();
1055 syntax.reparse(&text, language_registry, language);
1056 }
1057 BufferSnapshot {
1058 text,
1059 syntax,
1060 file: None,
1061 diagnostics: Default::default(),
1062 remote_selections: Default::default(),
1063 language,
1064 non_text_state_update_count: 0,
1065 }
1066 }
1067 }
1068
1069 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1070 let entity_id = cx.reserve_entity::<Self>().entity_id();
1071 let buffer_id = entity_id.as_non_zero_u64().into();
1072 let text = TextBuffer::new_normalized(
1073 ReplicaId::LOCAL,
1074 buffer_id,
1075 Default::default(),
1076 Rope::new(),
1077 )
1078 .snapshot();
1079 let syntax = SyntaxMap::new(&text).snapshot();
1080 BufferSnapshot {
1081 text,
1082 syntax,
1083 file: None,
1084 diagnostics: Default::default(),
1085 remote_selections: Default::default(),
1086 language: None,
1087 non_text_state_update_count: 0,
1088 }
1089 }
1090
1091 #[cfg(any(test, feature = "test-support"))]
1092 pub fn build_snapshot_sync(
1093 text: Rope,
1094 language: Option<Arc<Language>>,
1095 language_registry: Option<Arc<LanguageRegistry>>,
1096 cx: &mut App,
1097 ) -> BufferSnapshot {
1098 let entity_id = cx.reserve_entity::<Self>().entity_id();
1099 let buffer_id = entity_id.as_non_zero_u64().into();
1100 let text =
1101 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1102 .snapshot();
1103 let mut syntax = SyntaxMap::new(&text).snapshot();
1104 if let Some(language) = language.clone() {
1105 syntax.reparse(&text, language_registry, language);
1106 }
1107 BufferSnapshot {
1108 text,
1109 syntax,
1110 file: None,
1111 diagnostics: Default::default(),
1112 remote_selections: Default::default(),
1113 language,
1114 non_text_state_update_count: 0,
1115 }
1116 }
1117
1118 /// Retrieve a snapshot of the buffer's current state. This is computationally
1119 /// cheap, and allows reading from the buffer on a background thread.
1120 pub fn snapshot(&self) -> BufferSnapshot {
1121 let text = self.text.snapshot();
1122 let mut syntax_map = self.syntax_map.lock();
1123 syntax_map.interpolate(&text);
1124 let syntax = syntax_map.snapshot();
1125
1126 BufferSnapshot {
1127 text,
1128 syntax,
1129 file: self.file.clone(),
1130 remote_selections: self.remote_selections.clone(),
1131 diagnostics: self.diagnostics.clone(),
1132 language: self.language.clone(),
1133 non_text_state_update_count: self.non_text_state_update_count,
1134 }
1135 }
1136
    /// Creates a branch of this buffer: a new buffer whose text starts from this
    /// one's and which tracks this buffer as its base, so that changes can later
    /// be merged back via `merge_into_base`.
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                // Keep the branch in sync with operations applied to the base.
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }
1161
    /// Computes, on a background task, an [`EditPreview`] showing how the buffer
    /// would look with the given `edits` applied, including a reparsed syntax
    /// snapshot for highlighting the preview.
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        let executor = cx.background_executor().clone();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                // Bring the syntax snapshot up to date with the pre-edit text,
                // so the interpolation below starts from a current tree.
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned(), &executor);
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                // Reparse against the edited text to produce accurate highlighting.
                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }
1194
    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        // Empty `ranges` means "everything": substitute a single all-covering
        // range. (The `&[0..usize::MAX]` temporary is promoted to `'static`.)
        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        // Collect the branch edits that intersect the requested ranges. Both
        // edits and ranges are in ascending order, so one forward scan suffices.
        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    // This range lies entirely before the edit; move on.
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        // Record the merged operation so that when it echoes back from the base
        // buffer (via `on_base_buffer_event`) it can be undone in this branch,
        // preventing the edit from being applied twice.
        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }
1249
    /// Handles an event emitted by this branch buffer's base buffer, replaying
    /// the base buffer's operations into the branch.
    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        // If this edit originated in the branch and was merged into the base
        // (see `merge_into_base`), it must not change the branch's text a second
        // time: apply it (to keep the operation streams consistent) and then
        // immediately undo it.
        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }
1281
    /// Borrows the buffer's underlying text snapshot directly, bypassing all
    /// language-related state. Test-only accessor.
    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }
1286
    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    ///
    /// Cheaper than [`Self::snapshot`], since no syntax-map work is performed.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }
1292
    /// The file associated with the buffer, if any; `None` for in-memory buffers.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
1297
    /// The version of the buffer that was last saved or reloaded from disk
    /// (see [`Self::did_save`] and [`Self::did_reload`]).
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }
1302
    /// The mtime of the buffer's file when the buffer was last saved or
    /// reloaded from disk (see [`Self::did_save`] and [`Self::did_reload`]).
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }
1307
    /// Assign a language to the buffer.
    ///
    /// Clears all existing syntax state and kicks off a reparse with the new
    /// language before emitting [`BufferEvent::LanguageChanged`].
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }
1317
1318 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1319 /// other languages if parts of the buffer are written in different languages.
1320 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1321 self.syntax_map
1322 .lock()
1323 .set_language_registry(language_registry);
1324 }
1325
    /// Returns the language registry assigned to the buffer's syntax map, if any.
    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }
1329
1330 /// Assign the line ending type to the buffer.
1331 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1332 self.text.set_line_ending(line_ending);
1333
1334 let lamport_timestamp = self.text.lamport_clock.tick();
1335 self.send_operation(
1336 Operation::UpdateLineEnding {
1337 line_ending,
1338 lamport_timestamp,
1339 },
1340 true,
1341 cx,
1342 );
1343 }
1344
1345 /// Assign the buffer a new [`Capability`].
1346 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1347 if self.capability != capability {
1348 self.capability = capability;
1349 cx.emit(BufferEvent::CapabilityChanged)
1350 }
1351 }
1352
    /// This method is called to signal that the buffer has been saved.
    ///
    /// Records the saved version and mtime, clears the unsaved-edits and
    /// conflict state, and emits [`BufferEvent::Saved`].
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }
1368
1369 /// Reloads the contents of the buffer from disk.
1370 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1371 let (tx, rx) = futures::channel::oneshot::channel();
1372 let encoding = self.encoding.clone();
1373
1374 let buffer_encoding = self.encoding.clone();
1375
1376 let prev_version = self.text.version();
1377 self.reload_task = Some(cx.spawn(async move |this, cx| {
1378 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1379 let file = this.file.as_ref()?.as_local()?;
1380 Some((file.disk_state().mtime(), {
1381 file.load(cx, (*encoding).clone(), false, true, Some(buffer_encoding))
1382 }))
1383 })?
1384 else {
1385 return Ok(());
1386 };
1387
1388 let new_text = new_text.await?;
1389 let diff = this
1390 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1391 .await;
1392 this.update(cx, |this, cx| {
1393 if this.version() == diff.base_version {
1394 this.finalize_last_transaction();
1395 this.apply_diff(diff, cx);
1396 tx.send(this.finalize_last_transaction().cloned()).ok();
1397 this.has_conflict = false;
1398 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1399 } else {
1400 if !diff.edits.is_empty()
1401 || this
1402 .edits_since::<usize>(&diff.base_version)
1403 .next()
1404 .is_some()
1405 {
1406 this.has_conflict = true;
1407 }
1408
1409 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1410 }
1411
1412 this.reload_task.take();
1413 })
1414 }));
1415 rx
1416 }
1417
    /// This method is called to signal that the buffer has been reloaded.
    ///
    /// Records the reloaded version, line ending, and mtime as the new saved
    /// state, and emits [`BufferEvent::Reloaded`].
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }
1434
    /// Replaces the buffer's [`File`] without emitting any events.
    ///
    /// Unlike [`Self::file_updated`], no change tracking or notification is
    /// performed; callers are responsible for any required follow-up.
    pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
        self.file = Some(new_file);
    }
    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    ///
    /// Emits [`BufferEvent::ReloadNeeded`] when a clean buffer's on-disk state
    /// changed, and [`BufferEvent::FileHandleChanged`] whenever the path or
    /// disk state differs from the previous file handle.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                // Only suggest reloading when there are no unsaved edits that a
                // reload would clobber, and the file still exists on disk.
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            // The buffer had no file before; gaining one counts as a change.
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }
1472
1473 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1474 Some(self.branch_state.as_ref()?.base_buffer.clone())
1475 }
1476
    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    ///
    /// For the language at a specific location (which may differ inside
    /// injected layers), use [`Self::language_at`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
1481
    /// Returns the [`Language`] at the given location.
    ///
    /// Picks the innermost (last) syntax layer that covers the position,
    /// falling back to the buffer's primary language when there are no layers.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                // The outermost layer always applies, regardless of sub-ranges.
                if is_first {
                    is_first = false;
                    return true;
                }

                // Deeper layers apply only if one of their included sub-ranges
                // overlaps the position; layers with no sub-range information
                // apply everywhere.
                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }
1512
1513 /// Returns each [`Language`] for the active syntax layers at the given location.
1514 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1515 let offset = position.to_offset(self);
1516 let mut languages: Vec<Arc<Language>> = self
1517 .syntax_map
1518 .lock()
1519 .layers_for_range(offset..offset, &self.text, false)
1520 .map(|info| info.language.clone())
1521 .collect();
1522
1523 if languages.is_empty()
1524 && let Some(buffer_language) = self.language()
1525 {
1526 languages.push(buffer_language.clone());
1527 }
1528
1529 languages
1530 }
1531
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    ///
    /// Incremented on e.g. language changes, completed reparses, and file
    /// handle changes.
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
1537
    /// Whether the buffer is being parsed in the background.
    ///
    /// True only while an asynchronous parse task spawned by [`Self::reparse`]
    /// is still pending.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }
1543
    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }
1549
    /// Overrides how long [`Self::reparse`] blocks the current thread waiting
    /// for a background parse before finishing it asynchronously. Test-only.
    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
1554
    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>) {
        // A background parse is already in flight; it will detect staleness and
        // re-trigger reparsing itself when it completes.
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        // Block briefly in case the parse finishes quickly; otherwise fall back
        // to finishing it asynchronously.
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
                self.reparse = None;
            }
            Err(parse_task) => {
                // todo(lw): hot foreground spawn
                self.reparse = Some(cx.spawn(async move |this, cx| {
                    let new_syntax_map = cx.background_spawn(parse_task).await;
                    this.update(cx, move |this, cx| {
                        // The parse result is stale if the buffer's language was
                        // swapped out while the background parse ran.
                        let grammar_changed = || {
                            this.language.as_ref().is_none_or(|current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            })
                        };
                        let language_registry_changed = || {
                            new_syntax_map.contains_unknown_injections()
                                && language_registry.is_some_and(|registry| {
                                    registry.version() != new_syntax_map.language_registry_version()
                                })
                        };
                        let parse_again = this.version.changed_since(&parsed_version)
                            || language_registry_changed()
                            || grammar_changed();
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.reparse = None;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                }));
            }
        }
    }
1645
    /// Records the result of a completed parse: installs the new syntax
    /// snapshot, triggers any autoindent work that was waiting on it, and
    /// emits [`BufferEvent::Reparsed`].
    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }
1655
    /// Returns a watch channel reporting whether the buffer is currently being
    /// parsed ([`ParseStatus::Parsing`]) or not ([`ParseStatus::Idle`]).
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }
1659
    /// Assign to the buffer a set of diagnostics created by a given language server.
    ///
    /// The update is applied locally and broadcast as an operation so that
    /// collaborators' buffers stay in sync.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }
1677
1678 pub fn buffer_diagnostics(
1679 &self,
1680 for_server: Option<LanguageServerId>,
1681 ) -> Vec<&DiagnosticEntry<Anchor>> {
1682 match for_server {
1683 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1684 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1685 Err(_) => Vec::new(),
1686 },
1687 None => self
1688 .diagnostics
1689 .iter()
1690 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1691 .collect(),
1692 }
1693 }
1694
    /// Computes and applies any pending autoindent requests.
    ///
    /// The computation gets up to 500µs on the current thread; if it takes
    /// longer, it completes asynchronously via `pending_autoindent`. When there
    /// is nothing to compute, all pending requests and waiters are resolved.
    fn request_autoindent(&mut self, cx: &mut Context<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            // Nothing to compute (no syntax or no requests): resolve everything.
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }
1720
    /// Returns a future that computes the indentation adjustments implied by
    /// the pending autoindent requests, keyed by buffer row, or `None` when
    /// there is nothing to do (no syntax information or no requests).
    fn compute_autoindents(
        &self,
    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
        // Yield to the executor periodically so large requests don't hog the
        // background thread.
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for (row_range, original_indent_column) in row_ranges {
                    // In block mode, only the first line's indentation is
                    // computed; the remaining lines shift by the same delta.
                    let new_edited_row_range = if request.is_block_mode {
                        row_range.start..row_range.start + 1
                    } else {
                        row_range.clone()
                    };

                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .map(|e| e.0)
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);

                            if old_suggestions.get(&new_row).is_none_or(
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(
                                    new_row,
                                    (suggested_indent, request.ignore_empty_lines),
                                );
                            }
                        }
                    }

                    if let (true, Some(original_indent_column)) =
                        (request.is_block_mode, original_indent_column)
                    {
                        let new_indent =
                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
                                *indent
                            } else {
                                snapshot.indent_size_for_line(row_range.start)
                            };
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
                                            Ordering::Less => {
                                                size.len = size.len.saturating_sub(-delta as u32)
                                            }
                                            Ordering::Equal => {}
                                        }
                                    }
                                    (size, request.ignore_empty_lines)
                                });
                            }
                        }
                    }

                    yield_now().await;
                }
            }

            indent_sizes
                .into_iter()
                .filter_map(|(row, (indent, ignore_empty_lines))| {
                    if ignore_empty_lines && snapshot.line_len(row) == 0 {
                        None
                    } else {
                        Some((row, indent))
                    }
                })
                .collect()
        })
    }
1891
1892 fn apply_autoindents(
1893 &mut self,
1894 indent_sizes: BTreeMap<u32, IndentSize>,
1895 cx: &mut Context<Self>,
1896 ) {
1897 self.autoindent_requests.clear();
1898 for tx in self.wait_for_autoindent_txs.drain(..) {
1899 tx.send(()).ok();
1900 }
1901
1902 let edits: Vec<_> = indent_sizes
1903 .into_iter()
1904 .filter_map(|(row, indent_size)| {
1905 let current_size = indent_size_for_line(self, row);
1906 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1907 })
1908 .collect();
1909
1910 let preserve_preview = self.preserve_preview();
1911 self.edit(edits, None, cx);
1912 if preserve_preview {
1913 self.refresh_preview();
1914 }
1915 }
1916
1917 /// Create a minimal edit that will cause the given row to be indented
1918 /// with the given size. After applying this edit, the length of the line
1919 /// will always be at least `new_size.len`.
1920 pub fn edit_for_indent_size_adjustment(
1921 row: u32,
1922 current_size: IndentSize,
1923 new_size: IndentSize,
1924 ) -> Option<(Range<Point>, String)> {
1925 if new_size.kind == current_size.kind {
1926 match new_size.len.cmp(¤t_size.len) {
1927 Ordering::Greater => {
1928 let point = Point::new(row, 0);
1929 Some((
1930 point..point,
1931 iter::repeat(new_size.char())
1932 .take((new_size.len - current_size.len) as usize)
1933 .collect::<String>(),
1934 ))
1935 }
1936
1937 Ordering::Less => Some((
1938 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1939 String::new(),
1940 )),
1941
1942 Ordering::Equal => None,
1943 }
1944 } else {
1945 Some((
1946 Point::new(row, 0)..Point::new(row, current_size.len),
1947 iter::repeat(new_size.char())
1948 .take(new_size.len as usize)
1949 .collect::<String>(),
1950 ))
1951 }
1952 }
1953
1954 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1955 /// and the given new text.
1956 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1957 let old_text = self.as_rope().clone();
1958 let base_version = self.version();
1959 cx.background_executor()
1960 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1961 let old_text = old_text.to_string();
1962 let line_ending = LineEnding::detect(&new_text);
1963 LineEnding::normalize(&mut new_text);
1964 let edits = text_diff(&old_text, &new_text);
1965 Diff {
1966 base_version,
1967 line_ending,
1968 edits,
1969 }
1970 })
1971 }
1972
1973 /// Spawns a background task that searches the buffer for any whitespace
1974 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1975 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1976 let old_text = self.as_rope().clone();
1977 let line_ending = self.line_ending();
1978 let base_version = self.version();
1979 cx.background_spawn(async move {
1980 let ranges = trailing_whitespace_ranges(&old_text);
1981 let empty = Arc::<str>::from("");
1982 Diff {
1983 base_version,
1984 line_ending,
1985 edits: ranges
1986 .into_iter()
1987 .map(|range| (range, empty.clone()))
1988 .collect(),
1989 }
1990 })
1991 }
1992
    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Skips if the buffer is empty.
    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
        let len = self.len();
        if len == 0 {
            return;
        }
        // Walk backwards through the rope's chunks to find the end of the last
        // run of non-whitespace content; `offset` ends up just past it.
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                // If the content is followed by exactly one "\n" that reaches
                // the end of the buffer, it is already well-formed.
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        // Replace all trailing whitespace with a single newline.
        self.edit([(offset..len, "\n")], None, cx);
    }
2016
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
    ///
    /// Returns the id of the transaction containing the applied edits, if any.
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
        let snapshot = self.snapshot();
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        // Running offset correction accumulated from intervening edits that
        // occurred strictly before the current hunk.
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                else {
                    return None;
                }
            }

            // Map the hunk into the buffer's current coordinate space.
            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }
2053
2054 pub fn has_unsaved_edits(&self) -> bool {
2055 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2056
2057 if last_version == self.version {
2058 self.has_unsaved_edits
2059 .set((last_version, has_unsaved_edits));
2060 return has_unsaved_edits;
2061 }
2062
2063 let has_edits = self.has_edits_since(&self.saved_version);
2064 self.has_unsaved_edits
2065 .set((self.version.clone(), has_edits));
2066 has_edits
2067 }
2068
2069 /// Checks if the buffer has unsaved changes.
2070 pub fn is_dirty(&self) -> bool {
2071 if self.capability == Capability::ReadOnly {
2072 return false;
2073 }
2074 if self.has_conflict {
2075 return true;
2076 }
2077 match self.file.as_ref().map(|f| f.disk_state()) {
2078 Some(DiskState::New) | Some(DiskState::Deleted) => {
2079 !self.is_empty() && self.has_unsaved_edits()
2080 }
2081 _ => self.has_unsaved_edits(),
2082 }
2083 }
2084
2085 /// Checks if the buffer and its file have both changed since the buffer
2086 /// was last saved or reloaded.
2087 pub fn has_conflict(&self) -> bool {
2088 if self.has_conflict {
2089 return true;
2090 }
2091 let Some(file) = self.file.as_ref() else {
2092 return false;
2093 };
2094 match file.disk_state() {
2095 DiskState::New => false,
2096 DiskState::Present { mtime } => match self.saved_mtime {
2097 Some(saved_mtime) => {
2098 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2099 }
2100 None => true,
2101 },
2102 DiskState::Deleted => false,
2103 }
2104 }
2105
    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    pub fn subscribe(&mut self) -> Subscription {
        // Delegates to the underlying text buffer's subscription mechanism.
        self.text.subscribe()
    }
2110
    /// Adds a bit to the list of bits that are set when the buffer's text changes.
    ///
    /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
    pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
        // `change_bits` is kept sorted by pointer address so each listener is
        // registered at most once; `Err(ix)` is the insertion point for a new
        // entry, while `Ok` means this bit is already present.
        if let Err(ix) = self
            .change_bits
            .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
        {
            self.change_bits.insert(ix, bit);
        }
    }
2123
2124 /// Set the change bit for all "listeners".
2125 fn was_changed(&mut self) {
2126 self.change_bits.retain(|change_bit| {
2127 change_bit
2128 .upgrade()
2129 .inspect(|bit| {
2130 _ = bit.replace(true);
2131 })
2132 .is_some()
2133 });
2134 }
2135
    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        // Convenience wrapper that stamps the transaction with the current time.
        self.start_transaction_at(Instant::now())
    }
2142
    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        // Capture dirtiness only at the outermost transaction, so that the
        // matching `end_transaction_at` can detect a dirty-state change across
        // the whole (possibly nested) transaction.
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }
2153
    /// Terminates the current transaction, if this is the outermost transaction.
    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        // Convenience wrapper that stamps the end with the current time.
        self.end_transaction_at(Instant::now(), cx)
    }
2158
2159 /// Terminates the current transaction, providing the current time. Subsequent transactions
2160 /// that occur within a short period of time will be grouped together. This
2161 /// is controlled by the buffer's undo grouping duration.
2162 pub fn end_transaction_at(
2163 &mut self,
2164 now: Instant,
2165 cx: &mut Context<Self>,
2166 ) -> Option<TransactionId> {
2167 assert!(self.transaction_depth > 0);
2168 self.transaction_depth -= 1;
2169 let was_dirty = if self.transaction_depth == 0 {
2170 self.was_dirty_before_starting_transaction.take().unwrap()
2171 } else {
2172 false
2173 };
2174 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2175 self.did_edit(&start_version, was_dirty, cx);
2176 Some(transaction_id)
2177 } else {
2178 None
2179 }
2180 }
2181
    /// Manually add a transaction to the buffer's undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        // Delegates to the underlying text buffer's history.
        self.text.push_transaction(transaction, now);
    }
2186
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.text.push_empty_transaction(now)
    }
2201
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.text.finalize_last_transaction()
    }
2207
    /// Manually group all changes since a given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.text.group_until_transaction(transaction_id);
    }
2212
    /// Manually remove a transaction from the buffer's undo history.
    ///
    /// Returns the removed transaction, if it existed.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.text.forget_transaction(transaction_id)
    }
2217
    /// Retrieve a transaction from the buffer's undo history.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.text.get_transaction(transaction_id)
    }
2222
    /// Manually merge two transactions in the buffer's undo history,
    /// folding `transaction` into `destination`.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }
2227
    /// Waits for the buffer to receive operations with the given timestamps.
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl Future<Output = Result<()>> + use<It> {
        self.text.wait_for_edits(edit_ids)
    }
2235
    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        self.text.wait_for_anchors(anchors)
    }
2243
    /// Waits for the buffer to receive operations up to the given version.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        self.text.wait_for_version(version)
    }
2251
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.text.give_up_waiting();
    }
2257
2258 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2259 let mut rx = None;
2260 if !self.autoindent_requests.is_empty() {
2261 let channel = oneshot::channel();
2262 self.wait_for_autoindent_txs.push(channel.0);
2263 rx = Some(channel.1);
2264 }
2265 rx
2266 }
2267
2268 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2269 pub fn set_active_selections(
2270 &mut self,
2271 selections: Arc<[Selection<Anchor>]>,
2272 line_mode: bool,
2273 cursor_shape: CursorShape,
2274 cx: &mut Context<Self>,
2275 ) {
2276 let lamport_timestamp = self.text.lamport_clock.tick();
2277 self.remote_selections.insert(
2278 self.text.replica_id(),
2279 SelectionSet {
2280 selections: selections.clone(),
2281 lamport_timestamp,
2282 line_mode,
2283 cursor_shape,
2284 },
2285 );
2286 self.send_operation(
2287 Operation::UpdateSelections {
2288 selections,
2289 line_mode,
2290 lamport_timestamp,
2291 cursor_shape,
2292 },
2293 true,
2294 cx,
2295 );
2296 self.non_text_state_update_count += 1;
2297 cx.notify();
2298 }
2299
2300 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2301 /// this replica.
2302 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2303 if self
2304 .remote_selections
2305 .get(&self.text.replica_id())
2306 .is_none_or(|set| !set.selections.is_empty())
2307 {
2308 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2309 }
2310 }
2311
2312 pub fn set_agent_selections(
2313 &mut self,
2314 selections: Arc<[Selection<Anchor>]>,
2315 line_mode: bool,
2316 cursor_shape: CursorShape,
2317 cx: &mut Context<Self>,
2318 ) {
2319 let lamport_timestamp = self.text.lamport_clock.tick();
2320 self.remote_selections.insert(
2321 ReplicaId::AGENT,
2322 SelectionSet {
2323 selections,
2324 lamport_timestamp,
2325 line_mode,
2326 cursor_shape,
2327 },
2328 );
2329 self.non_text_state_update_count += 1;
2330 cx.notify();
2331 }
2332
    /// Clears the agent replica's selections by replacing them with an empty set.
    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
    }
2336
    /// Replaces the buffer's entire text.
    ///
    /// Returns the timestamp of the resulting edit operation, or `None` when
    /// the replacement produced no edit.
    pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        // A wholesale replacement invalidates any queued autoindent work.
        self.autoindent_requests.clear();
        self.edit([(0..self.len(), text)], None, cx)
    }
2345
    /// Appends the given text to the end of the buffer.
    ///
    /// Returns the timestamp of the resulting edit operation, or `None` when
    /// the text was empty.
    pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.edit([(self.len()..self.len(), text)], None, cx)
    }
2353
    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
    /// delete, and a string of text to insert at that location.
    ///
    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
    /// request for the edited ranges, which will be processed when the buffer finishes
    /// parsing.
    ///
    /// Parsing takes place at the end of a transaction, and may compute synchronously
    /// or asynchronously, depending on the changes.
    ///
    /// Returns the lamport timestamp of the resulting edit operation, or
    /// `None` when every supplied edit was empty.
    pub fn edit<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        // Skip invalid edits and coalesce contiguous ones.
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();

        for (range, new_text) in edits_iter {
            let mut range = range.start.to_offset(self)..range.end.to_offset(self);

            // Normalize reversed ranges rather than rejecting them.
            if range.start > range.end {
                mem::swap(&mut range.start, &mut range.end);
            }
            let new_text = new_text.into();
            if !new_text.is_empty() || !range.is_empty() {
                // Merge with the previous edit when the ranges touch or overlap.
                if let Some((prev_range, prev_text)) = edits.last_mut()
                    && prev_range.end >= range.start
                {
                    prev_range.end = cmp::max(prev_range.end, range.end);
                    *prev_text = format!("{prev_text}{new_text}").into();
                } else {
                    edits.push((range, new_text));
                }
            }
        }
        if edits.is_empty() {
            return None;
        }

        self.start_transaction();
        // Any in-flight autoindent computation is now stale.
        self.pending_autoindent.take();
        // Autoindent is only requested when a language is assigned; the
        // snapshot captures the pre-edit state the indentation is based on.
        let autoindent_request = autoindent_mode
            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));

        let edit_operation = self
            .text
            .edit(edits.iter().cloned(), cx.background_executor());
        let edit_id = edit_operation.timestamp();

        if let Some((before_edit, mode)) = autoindent_request {
            // Cumulative length change from preceding edits, used to map each
            // range into post-edit coordinates.
            let mut delta = 0isize;
            // Caches the `auto_indent` setting across consecutive edits that
            // share a language.
            let mut previous_setting = None;
            let entries: Vec<_> = edits
                .into_iter()
                .enumerate()
                .zip(&edit_operation.as_edit().unwrap().new_text)
                .filter(|((_, (range, _)), _)| {
                    let language = before_edit.language_at(range.start);
                    let language_id = language.map(|l| l.id());
                    if let Some((cached_language_id, auto_indent)) = previous_setting
                        && cached_language_id == language_id
                    {
                        auto_indent
                    } else {
                        // The auto-indent setting is not present in editorconfigs, hence
                        // we can avoid passing the file here.
                        let auto_indent =
                            language_settings(language.map(|l| l.name()), None, cx).auto_indent;
                        previous_setting = Some((language_id, auto_indent));
                        auto_indent
                    }
                })
                .map(|((ix, (range, _)), new_text)| {
                    let new_text_length = new_text.len();
                    let old_start = range.start.to_point(&before_edit);
                    let new_start = (delta + range.start as isize) as usize;
                    let range_len = range.end - range.start;
                    delta += new_text_length as isize - range_len as isize;

                    // Decide what range of the insertion to auto-indent, and whether
                    // the first line of the insertion should be considered a newly-inserted line
                    // or an edit to an existing line.
                    let mut range_of_insertion_to_indent = 0..new_text_length;
                    let mut first_line_is_new = true;

                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
                    let old_line_end = before_edit.line_len(old_start.row);

                    // Insertions after existing non-indent content edit that
                    // line rather than creating a new one.
                    if old_start.column > old_line_start {
                        first_line_is_new = false;
                    }

                    if !new_text.contains('\n')
                        && (old_start.column + (range_len as u32) < old_line_end
                            || old_line_end == old_line_start)
                    {
                        first_line_is_new = false;
                    }

                    // When inserting text starting with a newline, avoid auto-indenting the
                    // previous line.
                    if new_text.starts_with('\n') {
                        range_of_insertion_to_indent.start += 1;
                        first_line_is_new = true;
                    }

                    let mut original_indent_column = None;
                    if let AutoindentMode::Block {
                        original_indent_columns,
                    } = &mode
                    {
                        // Prefer the caller-provided indent column for this
                        // edit; fall back to measuring the inserted text.
                        original_indent_column = Some(if new_text.starts_with('\n') {
                            indent_size_for_text(
                                new_text[range_of_insertion_to_indent.clone()].chars(),
                            )
                            .len
                        } else {
                            original_indent_columns
                                .get(ix)
                                .copied()
                                .flatten()
                                .unwrap_or_else(|| {
                                    indent_size_for_text(
                                        new_text[range_of_insertion_to_indent.clone()].chars(),
                                    )
                                    .len
                                })
                        });

                        // Avoid auto-indenting the line after the edit.
                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
                            range_of_insertion_to_indent.end -= 1;
                        }
                    }

                    AutoindentRequestEntry {
                        first_line_is_new,
                        original_indent_column,
                        indent_size: before_edit.language_indent_size_at(range.start, cx),
                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
                    }
                })
                .collect();

            if !entries.is_empty() {
                self.autoindent_requests.push(Arc::new(AutoindentRequest {
                    before_edit,
                    entries,
                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
                    ignore_empty_lines: false,
                }));
            }
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(edit_operation), true, cx);
        Some(edit_id)
    }
2519
2520 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2521 self.was_changed();
2522
2523 if self.edits_since::<usize>(old_version).next().is_none() {
2524 return;
2525 }
2526
2527 self.reparse(cx);
2528 cx.emit(BufferEvent::Edited);
2529 if was_dirty != self.is_dirty() {
2530 cx.emit(BufferEvent::DirtyChanged);
2531 }
2532 cx.notify();
2533 }
2534
2535 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2536 where
2537 I: IntoIterator<Item = Range<T>>,
2538 T: ToOffset + Copy,
2539 {
2540 let before_edit = self.snapshot();
2541 let entries = ranges
2542 .into_iter()
2543 .map(|range| AutoindentRequestEntry {
2544 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2545 first_line_is_new: true,
2546 indent_size: before_edit.language_indent_size_at(range.start, cx),
2547 original_indent_column: None,
2548 })
2549 .collect();
2550 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2551 before_edit,
2552 entries,
2553 is_block_mode: false,
2554 ignore_empty_lines: true,
2555 }));
2556 self.request_autoindent(cx);
2557 }
2558
    // Inserts newlines at the given position to create an empty line, returning the start of the new line.
    // You can also request the insertion of empty lines above and below the line starting at the returned point.
    pub fn insert_empty_line(
        &mut self,
        position: impl ToPoint,
        space_above: bool,
        space_below: bool,
        cx: &mut Context<Self>,
    ) -> Point {
        let mut position = position.to_point(self);

        self.start_transaction();

        // Break the current line at `position`.
        self.edit(
            [(position..position, "\n")],
            Some(AutoindentMode::EachLine),
            cx,
        );

        // If we split mid-line, the newly created line is one row below.
        if position.column > 0 {
            position += Point::new(1, 0);
        }

        // If the target line still has content, push it down one more row so
        // that the returned line is actually empty.
        if !self.is_line_blank(position.row) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        // Optionally guarantee a blank line above the inserted line…
        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
            position.row += 1;
        }

        // …and a blank line below it.
        if space_below
            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
        {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        self.end_transaction(cx);

        position
    }
2613
    /// Applies the given remote operations to the buffer.
    ///
    /// Text operations are applied in bulk by the underlying text buffer;
    /// other operations are applied immediately when possible and deferred
    /// otherwise.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        // Split incoming ops: text ops are collected for bulk application;
        // non-text ops are applied now or deferred until applicable.
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        // Re-emit received buffer ops (marked non-local) before applying them.
        for operation in buffer_ops.iter() {
            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
        }
        self.text
            .apply_ops(buffer_ops, Some(cx.background_executor()));
        self.deferred_ops.insert(deferred_ops);
        // Newly applied ops may have unblocked previously deferred ones.
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
    }
2646
2647 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2648 let mut deferred_ops = Vec::new();
2649 for op in self.deferred_ops.drain().iter().cloned() {
2650 if self.can_apply_op(&op) {
2651 self.apply_op(op, cx);
2652 } else {
2653 deferred_ops.push(op);
2654 }
2655 }
2656 self.deferred_ops.insert(deferred_ops);
2657 }
2658
    /// Returns whether any operations (text or non-text) are still waiting to
    /// be applied.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
    }
2662
2663 fn can_apply_op(&self, operation: &Operation) -> bool {
2664 match operation {
2665 Operation::Buffer(_) => {
2666 unreachable!("buffer operations should never be applied at this layer")
2667 }
2668 Operation::UpdateDiagnostics {
2669 diagnostics: diagnostic_set,
2670 ..
2671 } => diagnostic_set.iter().all(|diagnostic| {
2672 self.text.can_resolve(&diagnostic.range.start)
2673 && self.text.can_resolve(&diagnostic.range.end)
2674 }),
2675 Operation::UpdateSelections { selections, .. } => selections
2676 .iter()
2677 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2678 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2679 }
2680 }
2681
    /// Applies a single non-text operation that has already passed
    /// [`Self::can_apply_op`].
    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                server_id,
                diagnostics: diagnostic_set,
                lamport_timestamp,
            } => {
                let snapshot = self.snapshot();
                self.apply_diagnostic_update(
                    server_id,
                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
                    lamport_timestamp,
                    cx,
                );
            }
            Operation::UpdateSelections {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            } => {
                // Drop the update when we already hold a newer selection set
                // for this replica.
                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
                    && set.lamport_timestamp > lamport_timestamp
                {
                    return;
                }

                self.remote_selections.insert(
                    lamport_timestamp.replica_id,
                    SelectionSet {
                        selections,
                        lamport_timestamp,
                        line_mode,
                        cursor_shape,
                    },
                );
                self.text.lamport_clock.observe(lamport_timestamp);
                self.non_text_state_update_count += 1;
            }
            Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
                server_id,
            } => {
                if triggers.is_empty() {
                    // Server cleared its triggers; rebuild the merged set from
                    // the remaining servers.
                    self.completion_triggers_per_language_server
                        .remove(&server_id);
                    self.completion_triggers = self
                        .completion_triggers_per_language_server
                        .values()
                        .flat_map(|triggers| triggers.iter().cloned())
                        .collect();
                } else {
                    self.completion_triggers_per_language_server
                        .insert(server_id, triggers.iter().cloned().collect());
                    self.completion_triggers.extend(triggers);
                }
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            } => {
                self.text.set_line_ending(line_ending);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
2753
2754 fn apply_diagnostic_update(
2755 &mut self,
2756 server_id: LanguageServerId,
2757 diagnostics: DiagnosticSet,
2758 lamport_timestamp: clock::Lamport,
2759 cx: &mut Context<Self>,
2760 ) {
2761 if lamport_timestamp > self.diagnostics_timestamp {
2762 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2763 if diagnostics.is_empty() {
2764 if let Ok(ix) = ix {
2765 self.diagnostics.remove(ix);
2766 }
2767 } else {
2768 match ix {
2769 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2770 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2771 };
2772 }
2773 self.diagnostics_timestamp = lamport_timestamp;
2774 self.non_text_state_update_count += 1;
2775 self.text.lamport_clock.observe(lamport_timestamp);
2776 cx.notify();
2777 cx.emit(BufferEvent::DiagnosticsUpdated);
2778 }
2779 }
2780
    /// Notifies change listeners and emits the operation as a
    /// [`BufferEvent::Operation`]; `is_local` records whether the operation
    /// originated on this replica.
    fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
        self.was_changed();
        cx.emit(BufferEvent::Operation {
            operation,
            is_local,
        });
    }
2788
    /// Removes the selections for a given peer.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }
2794
2795 /// Undoes the most recent transaction.
2796 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2797 let was_dirty = self.is_dirty();
2798 let old_version = self.version.clone();
2799
2800 if let Some((transaction_id, operation)) = self.text.undo() {
2801 self.send_operation(Operation::Buffer(operation), true, cx);
2802 self.did_edit(&old_version, was_dirty, cx);
2803 Some(transaction_id)
2804 } else {
2805 None
2806 }
2807 }
2808
2809 /// Manually undoes a specific transaction in the buffer's undo history.
2810 pub fn undo_transaction(
2811 &mut self,
2812 transaction_id: TransactionId,
2813 cx: &mut Context<Self>,
2814 ) -> bool {
2815 let was_dirty = self.is_dirty();
2816 let old_version = self.version.clone();
2817 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2818 self.send_operation(Operation::Buffer(operation), true, cx);
2819 self.did_edit(&old_version, was_dirty, cx);
2820 true
2821 } else {
2822 false
2823 }
2824 }
2825
2826 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2827 pub fn undo_to_transaction(
2828 &mut self,
2829 transaction_id: TransactionId,
2830 cx: &mut Context<Self>,
2831 ) -> bool {
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834
2835 let operations = self.text.undo_to_transaction(transaction_id);
2836 let undone = !operations.is_empty();
2837 for operation in operations {
2838 self.send_operation(Operation::Buffer(operation), true, cx);
2839 }
2840 if undone {
2841 self.did_edit(&old_version, was_dirty, cx)
2842 }
2843 undone
2844 }
2845
    /// Undoes specific operations by id, with the given undo counts,
    /// delegating to the underlying text buffer.
    pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
        let was_dirty = self.is_dirty();
        let operation = self.text.undo_operations(counts);
        // NOTE(review): `old_version` is captured *after* `undo_operations`
        // runs, unlike `undo`/`redo` which capture it before mutating —
        // confirm this ordering is intentional.
        let old_version = self.version.clone();
        self.send_operation(Operation::Buffer(operation), true, cx);
        self.did_edit(&old_version, was_dirty, cx);
    }
2853
2854 /// Manually redoes a specific transaction in the buffer's redo history.
2855 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2856 let was_dirty = self.is_dirty();
2857 let old_version = self.version.clone();
2858
2859 if let Some((transaction_id, operation)) = self.text.redo() {
2860 self.send_operation(Operation::Buffer(operation), true, cx);
2861 self.did_edit(&old_version, was_dirty, cx);
2862 Some(transaction_id)
2863 } else {
2864 None
2865 }
2866 }
2867
2868 /// Manually undoes all changes until a given transaction in the buffer's redo history.
2869 pub fn redo_to_transaction(
2870 &mut self,
2871 transaction_id: TransactionId,
2872 cx: &mut Context<Self>,
2873 ) -> bool {
2874 let was_dirty = self.is_dirty();
2875 let old_version = self.version.clone();
2876
2877 let operations = self.text.redo_to_transaction(transaction_id);
2878 let redone = !operations.is_empty();
2879 for operation in operations {
2880 self.send_operation(Operation::Buffer(operation), true, cx);
2881 }
2882 if redone {
2883 self.did_edit(&old_version, was_dirty, cx)
2884 }
2885 redone
2886 }
2887
    /// Override current completion triggers with the user-provided completion triggers.
    pub fn set_completion_triggers(
        &mut self,
        server_id: LanguageServerId,
        triggers: BTreeSet<String>,
        cx: &mut Context<Self>,
    ) {
        self.completion_triggers_timestamp = self.text.lamport_clock.tick();
        if triggers.is_empty() {
            // This server no longer contributes triggers; rebuild the merged
            // set from the remaining servers.
            self.completion_triggers_per_language_server
                .remove(&server_id);
            self.completion_triggers = self
                .completion_triggers_per_language_server
                .values()
                .flat_map(|triggers| triggers.iter().cloned())
                .collect();
        } else {
            self.completion_triggers_per_language_server
                .insert(server_id, triggers.clone());
            // NOTE(review): this only adds the new triggers to the merged set;
            // triggers this server previously registered but has now dropped
            // are not removed here — confirm that is intentional.
            self.completion_triggers.extend(triggers.iter().cloned());
        }
        // Broadcast so other replicas observe the same trigger set.
        self.send_operation(
            Operation::UpdateCompletionTriggers {
                triggers: triggers.into_iter().collect(),
                lamport_timestamp: self.completion_triggers_timestamp,
                server_id,
            },
            true,
            cx,
        );
        cx.notify();
    }
2920
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
    pub fn completion_triggers(&self) -> &BTreeSet<String> {
        // Merged view across all language servers' trigger sets.
        &self.completion_triggers
    }
2926
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `should_dismiss_preview`
    /// to return false until there are additional edits.
    pub fn refresh_preview(&mut self) {
        // Snapshot the current version as the "preview baseline".
        self.preview_version = self.version.clone();
    }
2933
    /// Whether we should preserve the preview status of a tab containing this buffer.
    ///
    /// True as long as no edits have occurred since the last `refresh_preview`.
    pub fn preserve_preview(&self) -> bool {
        !self.has_edits_since(&self.preview_version)
    }
2938
2939 /// Update the `encoding` field, whenever the `encoding` field of the file changes
2940 pub fn update_encoding(&mut self) {
2941 if let Some(file) = self.file() {
2942 if let Some(encoding) = file.encoding() {
2943 self.encoding.set(encoding.get());
2944 } else {
2945 self.encoding.set(encodings::UTF_8);
2946 };
2947 }
2948 }
2949}
2950
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits encoded in `marked_string` (see `edits_for_marked_text`)
    /// to the buffer, optionally auto-indenting the edited ranges.
    pub fn edit_via_marked_text(
        &mut self,
        marked_string: &str,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits, autoindent_mode, cx);
    }

    /// Overrides the time window within which consecutive edits are grouped
    /// into a single undo transaction.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.text.set_group_interval(group_interval);
    }

    /// Performs up to `old_range_count` random, non-overlapping edits, for
    /// randomized testing. Replacement text is uppercased so it is easy to
    /// distinguish from pre-existing content in test logs.
    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut last_end = None;
        for _ in 0..old_range_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }

            // Start each new range strictly after the previous one so the
            // generated edits never overlap.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let mut range = self.random_byte_range(new_start, rng);
            // Occasionally produce a reversed range to exercise normalization.
            if rng.random_bool(0.2) {
                mem::swap(&mut range.start, &mut range.end);
            }
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
            new_text = new_text.to_uppercase();

            edits.push((range, new_text));
        }
        log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
        self.edit(edits, None, cx);
    }

    /// Randomly undoes or redoes transactions, for randomized testing.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let ops = self.text.randomly_undo_redo(rng);
        if !ops.is_empty() {
            for op in ops {
                self.send_operation(Operation::Buffer(op), true, cx);
            }
            // Fire a single edit notification for the whole batch, matching
            // `redo_to_transaction`: calling `did_edit` once per operation
            // would emit duplicate events for the same logical change.
            self.did_edit(&old_version, was_dirty, cx);
        }
    }
}
3009
/// Lets [`Buffer`] broadcast [`BufferEvent`]s to GPUI subscribers.
impl EventEmitter<BufferEvent> for Buffer {}
3011
/// Dereferences to the underlying [`TextBuffer`], exposing its read-only API
/// directly on [`Buffer`].
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
3019
3020impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    ///
    /// Delegates to the free function of the same name.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }
3026
3027 /// Returns [`IndentSize`] for a given position that respects user settings
3028 /// and language preferences.
3029 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3030 let settings = language_settings(
3031 self.language_at(position).map(|l| l.name()),
3032 self.file(),
3033 cx,
3034 );
3035 if settings.hard_tabs {
3036 IndentSize::tab()
3037 } else {
3038 IndentSize::spaces(settings.tab_size.get())
3039 }
3040 }
3041
3042 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3043 /// is passed in as `single_indent_size`.
3044 pub fn suggested_indents(
3045 &self,
3046 rows: impl Iterator<Item = u32>,
3047 single_indent_size: IndentSize,
3048 ) -> BTreeMap<u32, IndentSize> {
3049 let mut result = BTreeMap::new();
3050
3051 for row_range in contiguous_ranges(rows, 10) {
3052 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3053 Some(suggestions) => suggestions,
3054 _ => break,
3055 };
3056
3057 for (row, suggestion) in row_range.zip(suggestions) {
3058 let indent_size = if let Some(suggestion) = suggestion {
3059 result
3060 .get(&suggestion.basis_row)
3061 .copied()
3062 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3063 .with_delta(suggestion.delta, single_indent_size)
3064 } else {
3065 self.indent_size_for_line(row)
3066 };
3067
3068 result.insert(row, indent_size);
3069 }
3070 }
3071
3072 result
3073 }
3074
    /// Computes an indentation suggestion for every row in `row_range`.
    ///
    /// Combines three sources of evidence:
    /// 1. tree-sitter indent queries (indent/start/end/outdent captures),
    /// 2. tree-sitter error nodes (suggestions inside errors are flagged),
    /// 3. the language config's increase/decrease indent regexes.
    ///
    /// Returns `None` when the buffer has no language. The returned iterator
    /// yields one `Option<IndentSuggestion>` per row; `None` entries mean
    /// "leave this row's indentation alone".
    fn suggest_autoindents(
        &self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
        let config = &self.language.as_ref()?.config;
        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

        // A suffixed start capture: the point where a construct began, plus
        // the suffix tag under which it was captured.
        #[derive(Debug, Clone)]
        struct StartPosition {
            start: Point,
            suffix: SharedString,
        }

        // Find the suggested indentation ranges based on the syntax tree.
        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
        let end = Point::new(row_range.end, 0);
        let range = (start..end).to_offset(&self.text);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            Some(&grammar.indents_config.as_ref()?.query)
        });
        let indent_configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.indents_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        let mut indent_ranges = Vec::<Range<Point>>::new();
        let mut start_positions = Vec::<StartPosition>::new();
        let mut outdent_positions = Vec::<Point>::new();
        while let Some(mat) = matches.peek() {
            let mut start: Option<Point> = None;
            let mut end: Option<Point> = None;

            let config = indent_configs[mat.grammar_index];
            for capture in mat.captures {
                if capture.index == config.indent_capture_ix {
                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.start_capture_ix {
                    start = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.end_capture_ix {
                    end = Some(Point::from_ts_point(capture.node.start_position()));
                } else if Some(capture.index) == config.outdent_capture_ix {
                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
                    start_positions.push(StartPosition {
                        start: Point::from_ts_point(capture.node.start_position()),
                        suffix: suffix.clone(),
                    });
                }
            }

            matches.advance();
            // Single-row matches can't change indentation; merge overlapping
            // ranges that start on the same point, keeping them sorted.
            if let Some((start, end)) = start.zip(end) {
                if start.row == end.row {
                    continue;
                }
                let range = start..end;
                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
                    Err(ix) => indent_ranges.insert(ix, range),
                    Ok(ix) => {
                        let prev_range = &mut indent_ranges[ix];
                        prev_range.end = prev_range.end.max(range.end);
                    }
                }
            }
        }

        // Collect syntax-error ranges; merged and kept sorted by start point.
        let mut error_ranges = Vec::<Range<Point>>::new();
        let mut matches = self
            .syntax
            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
        while let Some(mat) = matches.peek() {
            let node = mat.captures[0].node;
            let start = Point::from_ts_point(node.start_position());
            let end = Point::from_ts_point(node.end_position());
            let range = start..end;
            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
                Ok(ix) | Err(ix) => ix,
            };
            let mut end_ix = ix;
            while let Some(existing_range) = error_ranges.get(end_ix) {
                if existing_range.end < end {
                    end_ix += 1;
                } else {
                    break;
                }
            }
            error_ranges.splice(ix..end_ix, [range]);
            matches.advance();
        }

        outdent_positions.sort();
        for outdent_position in outdent_positions {
            // find the innermost indent range containing this outdent_position
            // set its end to the outdent position
            if let Some(range_to_truncate) = indent_ranges
                .iter_mut()
                .filter(|indent_range| indent_range.contains(&outdent_position))
                .next_back()
            {
                range_to_truncate.end = outdent_position;
            }
        }

        start_positions.sort_by_key(|b| b.start);

        // Find the suggested indentation increases and decreased based on regexes.
        let mut regex_outdent_map = HashMap::default();
        let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
        let mut start_positions_iter = start_positions.iter().peekable();

        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
        self.for_each_line(
            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
                ..Point::new(row_range.end, 0),
            |row, line| {
                if config
                    .decrease_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row, Ordering::Less));
                }
                if config
                    .increase_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row + 1, Ordering::Greater));
                }
                // Record suffixed start positions from earlier rows so the
                // `decrease_indent_patterns` rules below can refer back to them.
                while let Some(pos) = start_positions_iter.peek() {
                    if pos.start.row < row {
                        let pos = start_positions_iter.next().unwrap();
                        last_seen_suffix
                            .entry(pos.suffix.to_string())
                            .or_default()
                            .push(pos.start);
                    } else {
                        break;
                    }
                }
                for rule in &config.decrease_indent_patterns {
                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
                        let row_start_column = self.indent_size_for_line(row).len;
                        let basis_row = rule
                            .valid_after
                            .iter()
                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
                            .flatten()
                            .filter(|start_point| start_point.column <= row_start_column)
                            .max_by_key(|start_point| start_point.row);
                        if let Some(outdent_to_row) = basis_row {
                            regex_outdent_map.insert(row, outdent_to_row.row);
                        }
                        break;
                    }
                }
            },
        );

        let mut indent_changes = indent_change_rows.into_iter().peekable();
        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
            prev_non_blank_row.unwrap_or(0)
        } else {
            row_range.start.saturating_sub(1)
        };

        // Walk each requested row, folding together the regex-driven and
        // syntax-driven evidence collected above.
        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
        Some(row_range.map(move |row| {
            let row_start = Point::new(row, self.indent_size_for_line(row).len);

            let mut indent_from_prev_row = false;
            let mut outdent_from_prev_row = false;
            let mut outdent_to_row = u32::MAX;
            let mut from_regex = false;

            // Consume regex-driven indent changes at or before this row.
            while let Some((indent_row, delta)) = indent_changes.peek() {
                match indent_row.cmp(&row) {
                    Ordering::Equal => match delta {
                        Ordering::Less => {
                            from_regex = true;
                            outdent_from_prev_row = true
                        }
                        Ordering::Greater => {
                            indent_from_prev_row = true;
                            from_regex = true
                        }
                        _ => {}
                    },

                    Ordering::Greater => break,
                    Ordering::Less => {}
                }

                indent_changes.next();
            }

            // Apply syntax-driven indent ranges that begin before this row.
            for range in &indent_ranges {
                if range.start.row >= row {
                    break;
                }
                if range.start.row == prev_row && range.end > row_start {
                    indent_from_prev_row = true;
                }
                if range.end > prev_row_start && range.end <= row_start {
                    outdent_to_row = outdent_to_row.min(range.start.row);
                }
            }

            // A `decrease_indent_patterns` rule overrides syntax indent.
            if let Some(basis_row) = regex_outdent_map.get(&row) {
                indent_from_prev_row = false;
                outdent_to_row = *basis_row;
                from_regex = true;
            }

            let within_error = error_ranges
                .iter()
                .any(|e| e.start.row < row && e.end > row_start);

            let suggestion = if outdent_to_row == prev_row
                || (outdent_from_prev_row && indent_from_prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if indent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Greater,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_to_row < prev_row {
                Some(IndentSuggestion {
                    basis_row: outdent_to_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Less,
                    within_error: within_error && !from_regex,
                })
            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else {
                None
            };

            prev_row = row;
            prev_row_start = row_start;
            suggestion
        }))
    }
3337
3338 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3339 while row > 0 {
3340 row -= 1;
3341 if !self.is_line_blank(row) {
3342 return Some(row);
3343 }
3344 }
3345 None
3346 }
3347
3348 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3349 let captures = self.syntax.captures(range, &self.text, |grammar| {
3350 grammar
3351 .highlights_config
3352 .as_ref()
3353 .map(|config| &config.query)
3354 });
3355 let highlight_maps = captures
3356 .grammars()
3357 .iter()
3358 .map(|grammar| grammar.highlight_map())
3359 .collect();
3360 (captures, highlight_maps)
3361 }
3362
3363 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3364 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3365 /// returned in chunks where each chunk has a single syntax highlighting style and
3366 /// diagnostic status.
3367 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3368 let range = range.start.to_offset(self)..range.end.to_offset(self);
3369
3370 let mut syntax = None;
3371 if language_aware {
3372 syntax = Some(self.get_highlights(range.clone()));
3373 }
3374 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3375 let diagnostics = language_aware;
3376 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3377 }
3378
    /// Builds syntax-highlighted text for the given range, optionally applying
    /// `override_style` on top of the theme's highlighting.
    pub fn highlighted_text_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> HighlightedText {
        HighlightedText::from_buffer_range(
            range,
            &self.text,
            &self.syntax,
            override_style,
            syntax_theme,
        )
    }
3393
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses callback to avoid allocating a string for each line.
    fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
        let mut line = String::new();
        let mut row = range.start.row;
        // The chained "\n" guarantees the final (possibly unterminated) line
        // is still flushed through the callback.
        for chunk in self
            .as_rope()
            .chunks_in_range(range.to_offset(self))
            .chain(["\n"])
        {
            for (newline_ix, text) in chunk.split('\n').enumerate() {
                // Every split boundary after the first marks the end of a line.
                if newline_ix > 0 {
                    callback(row, &line);
                    row += 1;
                    line.clear();
                }
                line.push_str(text);
            }
        }
    }
3414
    /// Iterates over every [`SyntaxLayer`] in the buffer, including hidden
    /// layers.
    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax_layers_for_range(0..self.len(), true)
    }
3419
3420 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3421 let offset = position.to_offset(self);
3422 self.syntax_layers_for_range(offset..offset, false)
3423 .filter(|l| l.node().end_byte() > offset)
3424 .last()
3425 }
3426
    /// Iterates over the syntax layers intersecting the given range.
    /// `include_hidden` controls whether hidden layers are yielded.
    pub fn syntax_layers_for_range<D: ToOffset>(
        &self,
        range: Range<D>,
        include_hidden: bool,
    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax
            .layers_for_range(range, &self.text, include_hidden)
    }
3435
3436 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3437 &self,
3438 range: Range<D>,
3439 ) -> Option<SyntaxLayer<'_>> {
3440 let range = range.to_offset(self);
3441 self.syntax
3442 .layers_for_range(range, &self.text, false)
3443 .max_by(|a, b| {
3444 if a.depth != b.depth {
3445 a.depth.cmp(&b.depth)
3446 } else if a.offset.0 != b.offset.0 {
3447 a.offset.0.cmp(&b.offset.0)
3448 } else {
3449 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3450 }
3451 })
3452 }
3453
    /// Returns the main [`Language`] assigned to the buffer, if any.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
3458
    /// Returns the [`Language`] at the given location.
    ///
    /// Uses the innermost syntax layer at the position, falling back to the
    /// buffer's main language when no layer covers it.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
        self.syntax_layer_at(position)
            .map(|info| info.language)
            .or(self.language.as_ref())
    }
3465
    /// Returns the settings for the language at the given location,
    /// taking the buffer's file (if any) into account.
    pub fn settings_at<'a, D: ToOffset>(
        &'a self,
        position: D,
        cx: &'a App,
    ) -> Cow<'a, LanguageSettings> {
        language_settings(
            self.language_at(position).map(|l| l.name()),
            self.file.as_ref(),
            cx,
        )
    }
3478
    /// Returns a [`CharClassifier`] configured for the language scope at the
    /// given location.
    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
        CharClassifier::new(self.language_scope_at(point))
    }
3482
    /// Returns the [`LanguageScope`] at the given location.
    ///
    /// Scans every non-hidden syntax layer intersecting the position and picks
    /// the layer whose smallest node containing the position is deepest (ties
    /// broken by the smaller node). Falls back to the buffer's main language
    /// with no override when no layer matches.
    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut scope = None;
        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self
            .syntax
            .layers_for_range(offset..offset, &self.text, false)
        {
            let mut cursor = layer.node().walk();

            // Descend to the smallest node that still contains `offset`.
            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            // Deeper layers win; within the same depth, smaller nodes win.
            if let Some(range) = range
                && smallest_range_and_depth.as_ref().is_none_or(
                    |(smallest_range, smallest_range_depth)| {
                        if layer.depth > *smallest_range_depth {
                            true
                        } else if layer.depth == *smallest_range_depth {
                            range.len() < smallest_range.len()
                        } else {
                            false
                        }
                    },
                )
            {
                smallest_range_and_depth = Some((range, layer.depth));
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }
3537
3538 /// Returns a tuple of the range and character kind of the word
3539 /// surrounding the given position.
3540 pub fn surrounding_word<T: ToOffset>(
3541 &self,
3542 start: T,
3543 scope_context: Option<CharScopeContext>,
3544 ) -> (Range<usize>, Option<CharKind>) {
3545 let mut start = start.to_offset(self);
3546 let mut end = start;
3547 let mut next_chars = self.chars_at(start).take(128).peekable();
3548 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3549
3550 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3551 let word_kind = cmp::max(
3552 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3553 next_chars.peek().copied().map(|c| classifier.kind(c)),
3554 );
3555
3556 for ch in prev_chars {
3557 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3558 start -= ch.len_utf8();
3559 } else {
3560 break;
3561 }
3562 }
3563
3564 for ch in next_chars {
3565 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3566 end += ch.len_utf8();
3567 } else {
3568 break;
3569 }
3570 }
3571
3572 (start..end, word_kind)
3573 }
3574
    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
    fn goto_node_enclosing_range(
        cursor: &mut tree_sitter::TreeCursor,
        query_range: &Range<usize>,
        require_larger: bool,
    ) -> bool {
        let mut ascending = false;
        loop {
            let mut range = cursor.node().byte_range();
            if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
                if range.start > query_range.start {
                    cursor.goto_previous_sibling();
                    range = cursor.node().byte_range();
                }
            } else {
                // When the query range is non-empty and the current node ends exactly at the start,
                // move to the next sibling to find a node that extends beyond the start.
                if range.end == query_range.start {
                    cursor.goto_next_sibling();
                    range = cursor.node().byte_range();
                }
            }

            let encloses = range.contains_inclusive(query_range)
                && (!require_larger || range.len() > query_range.len());
            if !encloses {
                // Current node doesn't qualify: climb until one does (or fail
                // at the root).
                ascending = true;
                if !cursor.goto_parent() {
                    return false;
                }
                continue;
            } else if ascending {
                // We were climbing and found an enclosing node — done.
                return true;
            }

            // Descend into the current node.
            if cursor
                .goto_first_child_for_byte(query_range.start)
                .is_none()
            {
                return true;
            }
        }
    }
3625
    /// Returns the smallest syntax node that encloses, and is strictly larger
    /// than, the given range — considering every syntax layer and keeping the
    /// smallest candidate across layers.
    pub fn syntax_ancestor<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;
        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that both contains the range and is larger than it.
            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
                continue;
            }

            let left_node = cursor.node();
            let mut layer_result = left_node;

            // For an empty range, try to find another node immediately to the right of the range.
            if left_node.end_byte() == range.start {
                let mut right_node = None;
                // Climb until a next sibling exists.
                while !cursor.goto_next_sibling() {
                    if !cursor.goto_parent() {
                        break;
                    }
                }

                // Descend along nodes starting exactly at the range start.
                while cursor.node().start_byte() == range.start {
                    right_node = Some(cursor.node());
                    if !cursor.goto_first_child() {
                        break;
                    }
                }

                // If there is a candidate node on both sides of the (empty) range, then
                // decide between the two by favoring a named node over an anonymous token.
                // If both nodes are the same in that regard, favor the right one.
                if let Some(right_node) = right_node
                    && (right_node.is_named() || !left_node.is_named())
                {
                    layer_result = right_node;
                }
            }

            // Keep the smallest result found across all layers.
            if let Some(previous_result) = &result
                && previous_result.byte_range().len() < layer_result.byte_range().len()
            {
                continue;
            }
            result = Some(layer_result);
        }

        result
    }
3682
    /// Find the previous sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that precedes the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the previous sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no previous sibling at any ancestor level.
    pub fn syntax_prev_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the previous sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_previous_sibling() {
                    let layer_result = cursor.node();

                    // Prefer the candidate ending earliest across layers; keep
                    // scanning earlier siblings when this one ends later than
                    // the best result so far.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().end < layer_result.byte_range().end {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3733
    /// Find the next sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that follows the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the next sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no next sibling at any ancestor level.
    pub fn syntax_next_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the next sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_next_sibling() {
                    let layer_result = cursor.node();

                    // Prefer the candidate starting latest across layers; keep
                    // scanning later siblings when this one starts earlier
                    // than the best result so far.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().start > layer_result.byte_range().start {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3784
    /// Returns the root syntax node within the given row
    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
        let start_offset = position.to_offset(self);

        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;

        // Only the first (outermost) layer at the position is considered.
        let layer = self
            .syntax
            .layers_for_range(start_offset..start_offset, &self.text, true)
            .next()?;

        let mut cursor = layer.node().walk();

        // Descend to the first leaf that touches the start of the range.
        while cursor.goto_first_child_for_byte(start_offset).is_some() {
            // Skip nodes that end exactly at the offset in favor of the next one.
            if cursor.node().end_byte() == start_offset {
                cursor.goto_next_sibling();
            }
        }

        // Ascend to the root node within the same row.
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
        }

        Some(cursor.node())
    }
3814
    /// Returns the outline for the buffer.
    ///
    /// Covers the entire buffer and includes extra context in item labels.
    /// This method allows passing an optional [`SyntaxTheme`] to
    /// syntax-highlight the returned symbols.
    pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
        Outline::new(self.outline_items_containing(0..self.len(), true, theme))
    }
3822
3823 /// Returns all the symbols that contain the given position.
3824 ///
3825 /// This method allows passing an optional [`SyntaxTheme`] to
3826 /// syntax-highlight the returned symbols.
3827 pub fn symbols_containing<T: ToOffset>(
3828 &self,
3829 position: T,
3830 theme: Option<&SyntaxTheme>,
3831 ) -> Vec<OutlineItem<Anchor>> {
3832 let position = position.to_offset(self);
3833 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3834 let end = self.clip_offset(position + 1, Bias::Right);
3835 let mut items = self.outline_items_containing(start..end, false, theme);
3836 let mut prev_depth = None;
3837 items.retain(|item| {
3838 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3839 prev_depth = Some(item.depth);
3840 result
3841 });
3842 items
3843 }
3844
3845 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3846 let range = range.to_offset(self);
3847 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3848 grammar.outline_config.as_ref().map(|c| &c.query)
3849 });
3850 let configs = matches
3851 .grammars()
3852 .iter()
3853 .map(|g| g.outline_config.as_ref().unwrap())
3854 .collect::<Vec<_>>();
3855
3856 while let Some(mat) = matches.peek() {
3857 let config = &configs[mat.grammar_index];
3858 let containing_item_node = maybe!({
3859 let item_node = mat.captures.iter().find_map(|cap| {
3860 if cap.index == config.item_capture_ix {
3861 Some(cap.node)
3862 } else {
3863 None
3864 }
3865 })?;
3866
3867 let item_byte_range = item_node.byte_range();
3868 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3869 None
3870 } else {
3871 Some(item_node)
3872 }
3873 });
3874
3875 if let Some(item_node) = containing_item_node {
3876 return Some(
3877 Point::from_ts_point(item_node.start_position())
3878 ..Point::from_ts_point(item_node.end_position()),
3879 );
3880 }
3881
3882 matches.advance();
3883 }
3884 None
3885 }
3886
    /// Returns the outline items intersecting the given range, with their
    /// ranges expressed as [`Anchor`]s (start biased after, end biased before).
    pub fn outline_items_containing<T: ToOffset>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Vec<OutlineItem<Anchor>> {
        self.outline_items_containing_internal(
            range,
            include_extra_context,
            theme,
            |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
        )
    }
3900
    /// Returns the outline items intersecting the given range, with their
    /// ranges expressed as [`Point`]s rather than anchors.
    pub fn outline_items_as_points_containing<T: ToOffset>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Vec<OutlineItem<Point>> {
        self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
            range
        })
    }
3911
    /// Shared implementation behind [`Self::outline_items_containing`] and
    /// [`Self::outline_items_as_points_containing`].
    ///
    /// Runs each grammar's outline query over `range`, builds one item per
    /// matching declaration, assigns nesting depths from range containment,
    /// attaches any annotation (e.g. doc comment) that immediately precedes an
    /// item, and maps every produced range through `range_callback` so callers
    /// choose the final coordinate type `U`.
    fn outline_items_containing_internal<T: ToOffset, U>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
        range_callback: fn(&Self, Range<Point>) -> Range<U>,
    ) -> Vec<OutlineItem<U>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });

        let mut items = Vec::new();
        // Row ranges of annotation captures (doc comments etc.), collected so
        // they can later be paired with the item that directly follows them.
        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
        while let Some(mat) = matches.peek() {
            // The query callback above only selected grammars that have an
            // outline config, so this unwrap is expected to hold.
            let config = matches.grammars()[mat.grammar_index]
                .outline_config
                .as_ref()
                .unwrap();
            if let Some(item) =
                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
            {
                items.push(item);
            } else if let Some(capture) = mat
                .captures
                .iter()
                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
            {
                let capture_range = capture.node.start_position()..capture.node.end_position();
                let mut capture_row_range =
                    capture_range.start.row as u32..capture_range.end.row as u32;
                // A capture ending at column 0 only touches the trailing
                // newline of the previous row; drop that empty final row.
                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
                {
                    capture_row_range.end -= 1;
                }
                // Merge annotation rows that are adjacent or overlapping into
                // one contiguous row range.
                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
                        last_row_range.end = capture_row_range.end;
                    } else {
                        annotation_row_ranges.push(capture_row_range);
                    }
                } else {
                    annotation_row_ranges.push(capture_row_range);
                }
            }
            matches.advance();
        }

        // Sort so that outer (containing) items come before the items they
        // contain: ascending by start, descending by end.
        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));

        // Assign depths based on containment relationships and convert to anchors.
        let mut item_ends_stack = Vec::<Point>::new();
        let mut anchor_items = Vec::new();
        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
        for item in items {
            // Pop ancestors that end before this item ends — they cannot
            // contain it; the remaining stack depth is this item's depth.
            while let Some(last_end) = item_ends_stack.last().copied() {
                if last_end < item.range.end {
                    item_ends_stack.pop();
                } else {
                    break;
                }
            }

            // An annotation belongs to this item only if it ends on the row
            // directly above the item's first row.
            let mut annotation_row_range = None;
            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
                let row_preceding_item = item.range.start.row.saturating_sub(1);
                if next_annotation_row_range.end < row_preceding_item {
                    annotation_row_ranges.next();
                } else {
                    if next_annotation_row_range.end == row_preceding_item {
                        annotation_row_range = Some(next_annotation_row_range.clone());
                        annotation_row_ranges.next();
                    }
                    break;
                }
            }

            anchor_items.push(OutlineItem {
                depth: item_ends_stack.len(),
                range: range_callback(self, item.range.clone()),
                source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
                text: item.text,
                highlight_ranges: item.highlight_ranges,
                name_ranges: item.name_ranges,
                body_range: item.body_range.map(|r| range_callback(self, r)),
                annotation_range: annotation_row_range.map(|annotation_range| {
                    let point_range = Point::new(annotation_range.start, 0)
                        ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
                    range_callback(self, point_range)
                }),
            });
            item_ends_stack.push(item.range.end);
        }

        anchor_items
    }
4008
    /// Builds a single [`OutlineItem`] from one outline-query match, or
    /// returns `None` when the match has no item capture, lies outside
    /// `range`, or captures no displayable text.
    ///
    /// The item's display text is assembled from the name/context captures,
    /// with syntax highlighting applied when a `theme` is provided. The
    /// returned item has `depth: 0`; depth is assigned later by the caller.
    fn next_outline_item(
        &self,
        config: &OutlineConfig,
        mat: &SyntaxMapMatch,
        range: &Range<usize>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Option<OutlineItem<Point>> {
        let item_node = mat.captures.iter().find_map(|cap| {
            if cap.index == config.item_capture_ix {
                Some(cap.node)
            } else {
                None
            }
        })?;

        // Skip items that don't intersect the requested range at all.
        let item_byte_range = item_node.byte_range();
        if item_byte_range.end < range.start || item_byte_range.start > range.end {
            return None;
        }
        let item_point_range = Point::from_ts_point(item_node.start_position())
            ..Point::from_ts_point(item_node.end_position());

        let mut open_point = None;
        let mut close_point = None;

        // (byte range, is_name) pairs for every text fragment that should
        // appear in the item's display text, in capture order.
        let mut buffer_ranges = Vec::new();
        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
            let mut range = node.start_byte()..node.end_byte();
            let start = node.start_position();
            // Multi-line captures are truncated to their first line so the
            // outline text stays single-line.
            if node.end_position().row > start.row {
                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
            }

            if !range.is_empty() {
                buffer_ranges.push((range, node_is_name));
            }
        };

        for capture in mat.captures {
            if capture.index == config.name_capture_ix {
                add_to_buffer_ranges(capture.node, true);
            } else if Some(capture.index) == config.context_capture_ix
                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
            {
                add_to_buffer_ranges(capture.node, false);
            } else {
                // The open/close captures delimit the item's body (e.g. the
                // braces of a function).
                if Some(capture.index) == config.open_capture_ix {
                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.close_capture_ix {
                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
                }
            }
        }

        if buffer_ranges.is_empty() {
            return None;
        }
        let source_range_for_text =
            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;

        let mut text = String::new();
        let mut highlight_ranges = Vec::new();
        let mut name_ranges = Vec::new();
        // Language-aware chunks (second arg `true`) so syntax highlight ids
        // are available for the fragments below.
        let mut chunks = self.chunks(source_range_for_text.clone(), true);
        let mut last_buffer_range_end = 0;
        for (buffer_range, is_name) in buffer_ranges {
            // Separate non-adjacent fragments with a single space.
            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
            if space_added {
                text.push(' ');
            }
            let before_append_len = text.len();
            let mut offset = buffer_range.start;
            chunks.seek(buffer_range.clone());
            for mut chunk in chunks.by_ref() {
                // Clamp the final chunk so we never copy past the fragment end.
                if chunk.text.len() > buffer_range.end - offset {
                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
                    offset = buffer_range.end;
                } else {
                    offset += chunk.text.len();
                }
                let style = chunk
                    .syntax_highlight_id
                    .zip(theme)
                    .and_then(|(highlight, theme)| highlight.style(theme));
                if let Some(style) = style {
                    let start = text.len();
                    let end = start + chunk.text.len();
                    highlight_ranges.push((start..end, style));
                }
                text.push_str(chunk.text);
                if offset >= buffer_range.end {
                    break;
                }
            }
            if is_name {
                let after_append_len = text.len();
                // Include the separating space in the name range when joining
                // consecutive name fragments.
                let start = if space_added && !name_ranges.is_empty() {
                    before_append_len - 1
                } else {
                    before_append_len
                };
                name_ranges.push(start..after_append_len);
            }
            last_buffer_range_end = buffer_range.end;
        }

        Some(OutlineItem {
            depth: 0, // We'll calculate the depth later
            range: item_point_range,
            source_range_for_text: source_range_for_text.to_point(self),
            text,
            highlight_ranges,
            name_ranges,
            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
            annotation_range: None,
        })
    }
4127
4128 pub fn function_body_fold_ranges<T: ToOffset>(
4129 &self,
4130 within: Range<T>,
4131 ) -> impl Iterator<Item = Range<usize>> + '_ {
4132 self.text_object_ranges(within, TreeSitterOptions::default())
4133 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4134 }
4135
4136 /// For each grammar in the language, runs the provided
4137 /// [`tree_sitter::Query`] against the given range.
4138 pub fn matches(
4139 &self,
4140 range: Range<usize>,
4141 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4142 ) -> SyntaxMapMatches<'_> {
4143 self.syntax.matches(range, self, query)
4144 }
4145
    /// Yields every bracket pair from the brackets query whose overall span
    /// (open start through close end, inclusive) overlaps `range`.
    ///
    /// Matches missing either an open or a close capture are skipped, as are
    /// pairs that fall entirely outside `range`.
    pub fn all_bracket_ranges(
        &self,
        range: Range<usize>,
    ) -> impl Iterator<Item = BracketMatch> + '_ {
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.brackets_config.as_ref().map(|c| &c.query)
        });
        // The query callback above only selected grammars with a brackets
        // config, so this unwrap is expected to hold.
        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.brackets_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            while let Some(mat) = matches.peek() {
                let mut open = None;
                let mut close = None;
                let config = &configs[mat.grammar_index];
                let pattern = &config.patterns[mat.pattern_index];
                for capture in mat.captures {
                    if capture.index == config.open_capture_ix {
                        open = Some(capture.node.byte_range());
                    } else if capture.index == config.close_capture_ix {
                        close = Some(capture.node.byte_range());
                    }
                }

                // Advance before filtering so the loop makes progress on
                // every iteration, even when this match is discarded.
                matches.advance();

                let Some((open_range, close_range)) = open.zip(close) else {
                    continue;
                };

                // Inclusive end: a pair whose close bracket starts exactly at
                // `range.end` still counts as overlapping.
                let bracket_range = open_range.start..=close_range.end;
                if !bracket_range.overlaps(&range) {
                    continue;
                }

                return Some(BracketMatch {
                    open_range,
                    close_range,
                    newline_only: pattern.newline_only,
                });
            }
            None
        })
    }
4193
4194 /// Returns bracket range pairs overlapping or adjacent to `range`
4195 pub fn bracket_ranges<T: ToOffset>(
4196 &self,
4197 range: Range<T>,
4198 ) -> impl Iterator<Item = BracketMatch> + '_ {
4199 // Find bracket pairs that *inclusively* contain the given range.
4200 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4201 self.all_bracket_ranges(range)
4202 .filter(|pair| !pair.newline_only)
4203 }
4204
    /// Runs each grammar's debugger-variables query over `range`, yielding
    /// `(byte range, DebuggerTextObject)` pairs for captures overlapping the
    /// (slightly widened) range.
    ///
    /// Captures within one match that map to the same text object are merged
    /// into a single covering range.
    // NOTE(review): unlike `text_object_ranges`, `range.end` is not clamped
    // to `self.len()` here — confirm whether that asymmetry is intentional.
    pub fn debug_variables_query<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);

        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions::default(),
            |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
        );

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.debug_variables_config.as_ref())
            .collect::<Vec<_>>();

        // Pending results from the most recently processed match; drained
        // (LIFO) before the next match is consumed.
        let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Emit buffered captures first, dropping any that fall
                // outside the queried range.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    // `objects_by_capture_ix` is sorted by capture index, so a
                    // binary search maps the capture to its text object.
                    let Some(ix) = config
                        .objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an existing entry for the same text object
                    // instead of emitting duplicates.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4271
    /// Runs each grammar's text-object query over `range`, yielding
    /// `(byte range, TextObject)` pairs for captures overlapping the
    /// (slightly widened, length-clamped) range.
    ///
    /// Captures within one match that map to the same text object are merged
    /// into a single covering range.
    pub fn text_object_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
        options: TreeSitterOptions,
    ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
        // Widen by one position on each side, clamping the end to the buffer
        // length.
        let range =
            range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));

        let mut matches =
            self.syntax
                .matches_with_options(range.clone(), &self.text, options, |grammar| {
                    grammar.text_object_config.as_ref().map(|c| &c.query)
                });

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.text_object_config.as_ref())
            .collect::<Vec<_>>();

        // Pending results from the most recently processed match; drained
        // (LIFO) before the next match is consumed.
        let mut captures = Vec::<(Range<usize>, TextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Emit buffered captures first, dropping any that fall
                // outside the queried range.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    // `text_objects_by_capture_ix` is sorted by capture index,
                    // so a binary search maps the capture to its text object.
                    let Some(ix) = config
                        .text_objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.text_objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an existing entry for the same text object
                    // instead of emitting duplicates.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4339
4340 /// Returns enclosing bracket ranges containing the given range
4341 pub fn enclosing_bracket_ranges<T: ToOffset>(
4342 &self,
4343 range: Range<T>,
4344 ) -> impl Iterator<Item = BracketMatch> + '_ {
4345 let range = range.start.to_offset(self)..range.end.to_offset(self);
4346
4347 self.bracket_ranges(range.clone()).filter(move |pair| {
4348 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4349 })
4350 }
4351
4352 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4353 ///
4354 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4355 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4356 &self,
4357 range: Range<T>,
4358 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4359 ) -> Option<(Range<usize>, Range<usize>)> {
4360 let range = range.start.to_offset(self)..range.end.to_offset(self);
4361
4362 // Get the ranges of the innermost pair of brackets.
4363 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4364
4365 for pair in self.enclosing_bracket_ranges(range) {
4366 if let Some(range_filter) = range_filter
4367 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4368 {
4369 continue;
4370 }
4371
4372 let len = pair.close_range.end - pair.open_range.start;
4373
4374 if let Some((existing_open, existing_close)) = &result {
4375 let existing_len = existing_close.end - existing_open.start;
4376 if len > existing_len {
4377 continue;
4378 }
4379 }
4380
4381 result = Some((pair.open_range, pair.close_range));
4382 }
4383
4384 result
4385 }
4386
    /// Returns anchor ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
    pub fn redacted_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = Range<usize>> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .redactions_config
                .as_ref()
                .map(|config| &config.query)
        });

        // Per-grammar configs, indexed by `mat.grammar_index`.
        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.redactions_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            // One match is consumed per call; returning `None` ends the
            // iterator when matches are exhausted.
            // NOTE(review): a match whose grammar has no config or whose
            // captures lack the redaction capture also yields `None` and so
            // terminates iteration early — confirm this is intended.
            let redacted_range = syntax_matches
                .peek()
                .and_then(|mat| {
                    configs[mat.grammar_index].and_then(|config| {
                        mat.captures
                            .iter()
                            .find(|capture| capture.index == config.redaction_capture_ix)
                    })
                })
                .map(|mat| mat.node.byte_range());
            syntax_matches.advance();
            redacted_range
        })
    }
4423
    /// Yields `(byte range, language)` pairs for language injections (e.g.
    /// embedded code blocks) whose content captures intersect `range`.
    pub fn injections_intersecting_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .injection_config
                .as_ref()
                .map(|config| &config.query)
        });

        // Per-grammar configs, indexed by `mat.grammar_index`.
        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.injection_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            // One match is consumed per call; returning `None` ends the
            // iterator when matches are exhausted.
            // NOTE(review): a match without a content capture, or one whose
            // content position has no resolvable language, also yields `None`
            // and terminates iteration early — confirm this is intended.
            let ranges = syntax_matches.peek().and_then(|mat| {
                let config = &configs[mat.grammar_index]?;
                let content_capture_range = mat.captures.iter().find_map(|capture| {
                    if capture.index == config.content_capture_ix {
                        Some(capture.node.byte_range())
                    } else {
                        None
                    }
                })?;
                let language = self.language_at(content_capture_range.start)?;
                Some((content_capture_range, language))
            });
            syntax_matches.advance();
            ranges
        })
    }
4460
    /// Yields one [`RunnableRange`] per runnables-query match within
    /// `offset_range` that contains a `Run` capture (the position where a run
    /// indicator should appear).
    ///
    /// Matches without a run capture, or without any captures at all, are
    /// skipped rather than terminating the iterator.
    pub fn runnable_ranges(
        &self,
        offset_range: Range<usize>,
    ) -> impl Iterator<Item = RunnableRange> + '_ {
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar.runnable_config.as_ref().map(|config| &config.query)
        });

        // Per-grammar runnable configs, indexed by `mat.grammar_index`.
        let test_configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.runnable_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            loop {
                let mat = syntax_matches.peek()?;

                // NOTE: the closure parameter shadows the outer `test_configs`
                // vector with this grammar's single config.
                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
                    let mut run_range = None;
                    // Compute the smallest byte range covering all captures of
                    // this match.
                    let full_range = mat.captures.iter().fold(
                        Range {
                            start: usize::MAX,
                            end: 0,
                        },
                        |mut acc, next| {
                            let byte_range = next.node.byte_range();
                            if acc.start > byte_range.start {
                                acc.start = byte_range.start;
                            }
                            if acc.end < byte_range.end {
                                acc.end = byte_range.end;
                            }
                            acc
                        },
                    );
                    if full_range.start > full_range.end {
                        // We did not find a full spanning range of this match.
                        return None;
                    }
                    // Collect named extra captures; as a side effect, record
                    // the `Run` capture's range if present.
                    let extra_captures: SmallVec<[_; 1]> =
                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
                            test_configs
                                .extra_captures
                                .get(capture.index as usize)
                                .cloned()
                                .and_then(|tag_name| match tag_name {
                                    RunnableCapture::Named(name) => {
                                        Some((capture.node.byte_range(), name))
                                    }
                                    RunnableCapture::Run => {
                                        let _ = run_range.insert(capture.node.byte_range());
                                        None
                                    }
                                })
                        }));
                    // Without a run marker this match is not runnable.
                    let run_range = run_range?;
                    // Tags come from `(#set! tag "...")` properties on the
                    // matched query pattern.
                    let tags = test_configs
                        .query
                        .property_settings(mat.pattern_index)
                        .iter()
                        .filter_map(|property| {
                            if *property.key == *"tag" {
                                property
                                    .value
                                    .as_ref()
                                    .map(|value| RunnableTag(value.to_string().into()))
                            } else {
                                None
                            }
                        })
                        .collect();
                    // Resolve each named capture to its text in the buffer.
                    let extra_captures = extra_captures
                        .into_iter()
                        .map(|(range, name)| {
                            (
                                name.to_string(),
                                self.text_for_range(range).collect::<String>(),
                            )
                        })
                        .collect();
                    // All tags should have the same range.
                    Some(RunnableRange {
                        run_range,
                        full_range,
                        runnable: Runnable {
                            tags,
                            language: mat.language,
                            buffer: self.remote_id(),
                        },
                        extra_captures,
                        buffer_id: self.remote_id(),
                    })
                });

                syntax_matches.advance();
                if test_range.is_some() {
                    // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
                    // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
                    return test_range;
                }
            }
        })
    }
4565
    /// Returns selections for remote peers intersecting the given range.
    ///
    /// Yields, per replica with non-empty selections, the replica id, its
    /// line-mode flag, its cursor shape, and an iterator over just the
    /// selections that intersect `range`. When `include_local` is false, the
    /// local replica's selections are excluded.
    #[allow(clippy::type_complexity)]
    pub fn selections_in_range(
        &self,
        range: Range<Anchor>,
        include_local: bool,
    ) -> impl Iterator<
        Item = (
            ReplicaId,
            bool,
            CursorShape,
            impl Iterator<Item = &Selection<Anchor>> + '_,
        ),
    > + '_ {
        self.remote_selections
            .iter()
            .filter(move |(replica_id, set)| {
                (include_local || **replica_id != self.text.replica_id())
                    && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                // Selections are ordered, so binary-search for the slice that
                // intersects `range`. Forcing the comparator to never return
                // `Equal` makes `binary_search_by` land on the partition
                // boundary: `start_ix` is the first selection ending at or
                // after `range.start`...
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                // ...and `end_ix` is one past the last selection starting at
                // or before `range.end`.
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe.start.cmp(&range.end, self).then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (
                    *replica_id,
                    set.line_mode,
                    set.cursor_shape,
                    set.selections[start_ix..end_ix].iter(),
                )
            })
    }
4606
4607 /// Returns if the buffer contains any diagnostics.
4608 pub fn has_diagnostics(&self) -> bool {
4609 !self.diagnostics.is_empty()
4610 }
4611
    /// Returns all the diagnostics intersecting the given range.
    ///
    /// Performs a k-way merge across the per-language-server diagnostic sets,
    /// ordering entries by start position, then severity, then group id
    /// (reversed when `reversed` is true). Range endpoints are converted to
    /// the caller's coordinate type `O`.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
        reversed: bool,
    ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
    where
        T: 'a + Clone + ToOffset,
        O: 'a + FromAnchor,
    {
        // One peekable iterator per language server's diagnostic set.
        let mut iterators: Vec<_> = self
            .diagnostics
            .iter()
            .map(|(_, collection)| {
                collection
                    .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
                    .peekable()
            })
            .collect();

        std::iter::from_fn(move || {
            // Pick the iterator whose next entry sorts first; exhausted
            // iterators are skipped by the `peek()?` inside `flat_map`.
            let (next_ix, _) = iterators
                .iter_mut()
                .enumerate()
                .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
                .min_by(|(_, a), (_, b)| {
                    let cmp = a
                        .range
                        .start
                        .cmp(&b.range.start, self)
                        // when range is equal, sort by diagnostic severity
                        .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
                        // and stabilize order with group_id
                        .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
                    if reversed { cmp.reverse() } else { cmp }
                })?;
            iterators[next_ix]
                .next()
                .map(
                    |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
                        diagnostic,
                        range: FromAnchor::from_anchor(&range.start, self)
                            ..FromAnchor::from_anchor(&range.end, self),
                    },
                )
        })
    }
4659
4660 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4661 /// should be used instead.
4662 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4663 &self.diagnostics
4664 }
4665
4666 /// Returns all the diagnostic groups associated with the given
4667 /// language server ID. If no language server ID is provided,
4668 /// all diagnostics groups are returned.
4669 pub fn diagnostic_groups(
4670 &self,
4671 language_server_id: Option<LanguageServerId>,
4672 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4673 let mut groups = Vec::new();
4674
4675 if let Some(language_server_id) = language_server_id {
4676 if let Ok(ix) = self
4677 .diagnostics
4678 .binary_search_by_key(&language_server_id, |e| e.0)
4679 {
4680 self.diagnostics[ix]
4681 .1
4682 .groups(language_server_id, &mut groups, self);
4683 }
4684 } else {
4685 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4686 diagnostics.groups(*language_server_id, &mut groups, self);
4687 }
4688 }
4689
4690 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4691 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4692 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4693 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4694 });
4695
4696 groups
4697 }
4698
4699 /// Returns an iterator over the diagnostics for the given group.
4700 pub fn diagnostic_group<O>(
4701 &self,
4702 group_id: usize,
4703 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4704 where
4705 O: FromAnchor + 'static,
4706 {
4707 self.diagnostics
4708 .iter()
4709 .flat_map(move |(_, set)| set.group(group_id, self))
4710 }
4711
4712 /// An integer version number that accounts for all updates besides
4713 /// the buffer's text itself (which is versioned via a version vector).
4714 pub fn non_text_state_update_count(&self) -> usize {
4715 self.non_text_state_update_count
4716 }
4717
4718 /// An integer version that changes when the buffer's syntax changes.
4719 pub fn syntax_update_count(&self) -> usize {
4720 self.syntax.update_count()
4721 }
4722
4723 /// Returns a snapshot of underlying file.
4724 pub fn file(&self) -> Option<&Arc<dyn File>> {
4725 self.file.as_ref()
4726 }
4727
4728 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4729 if let Some(file) = self.file() {
4730 if file.path().file_name().is_none() || include_root {
4731 Some(file.full_path(cx).to_string_lossy().into_owned())
4732 } else {
4733 Some(file.path().display(file.path_style(cx)).to_string())
4734 }
4735 } else {
4736 None
4737 }
4738 }
4739
    /// Scans `query.range` for words, returning a map from word text to its
    /// anchor range in the buffer.
    ///
    /// When `query.fuzzy_contents` is set, a word is only included if the
    /// query's characters appear within it, in order (case-insensitive
    /// subsequence match). When `query.skip_digits` is set, words starting
    /// with a digit are excluded. An empty fuzzy query matches nothing.
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
        let query_str = query.fuzzy_contents;
        if query_str.is_some_and(|query| query.is_empty()) {
            return BTreeMap::default();
        }

        // Word-character classification follows the buffer's language, if any.
        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        }));

        // `query_ix` tracks how many query chars have matched, in order,
        // within the current word; it resets at each word boundary.
        let mut query_ix = 0;
        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
        let query_len = query_chars.as_ref().map_or(0, |query| query.len());

        let mut words = BTreeMap::default();
        let mut current_word_start_ix = None;
        // Byte offset of the current chunk within the buffer.
        let mut chunk_ix = query.range.start;
        for chunk in self.chunks(query.range, false) {
            for (i, c) in chunk.text.char_indices() {
                let ix = chunk_ix + i;
                if classifier.is_word(c) {
                    if current_word_start_ix.is_none() {
                        current_word_start_ix = Some(ix);
                    }

                    // Advance the fuzzy match on a case-insensitive char match.
                    if let Some(query_chars) = &query_chars
                        && query_ix < query_len
                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
                    {
                        query_ix += 1;
                    }
                    continue;
                } else if let Some(word_start) = current_word_start_ix.take()
                    && query_ix == query_len
                {
                    // Word just ended and the whole fuzzy query matched.
                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
                    let mut word_text = self.text_for_range(word_start..ix).peekable();
                    let first_char = word_text
                        .peek()
                        .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
                    if !query.skip_digits
                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
                    {
                        words.insert(word_text.collect(), word_range);
                    }
                }
                query_ix = 0;
            }
            chunk_ix += chunk.text.len();
        }

        words
    }
4795}
4796
/// Parameters for `BufferSnapshot::words_in_range`.
pub struct WordsQuery<'a> {
    /// Only returns words with all chars from the fuzzy string in them.
    pub fuzzy_contents: Option<&'a str>,
    /// Skips words that start with a digit.
    pub skip_digits: bool,
    /// Buffer offset range, to look for words.
    pub range: Range<usize>,
}
4805
4806fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4807 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4808}
4809
4810fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4811 let mut result = IndentSize::spaces(0);
4812 for c in text {
4813 let kind = match c {
4814 ' ' => IndentKind::Space,
4815 '\t' => IndentKind::Tab,
4816 _ => break,
4817 };
4818 if result.len == 0 {
4819 result.kind = kind;
4820 }
4821 result.len += 1;
4822 }
4823 result
4824}
4825
// Hand-written rather than derived; every field is cloned as-is.
// NOTE(review): this looks equivalent to `#[derive(Clone)]` — confirm whether
// the manual impl exists for a reason before replacing it.
impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
}
4839
/// Lets a `BufferSnapshot` be used wherever a plain text snapshot is
/// expected, delegating all text-level queries to the underlying `text`
/// field.
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
4847
// SAFETY: presumably the non-`Send` state inside `BufferChunks` (e.g. raw
// tree-sitter cursors) is only ever accessed by the thread that owns the
// iterator — TODO(review): confirm; nothing in this file demonstrates that
// invariant.
unsafe impl Send for BufferChunks<'_> {}
4849
4850impl<'a> BufferChunks<'a> {
4851 pub(crate) fn new(
4852 text: &'a Rope,
4853 range: Range<usize>,
4854 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4855 diagnostics: bool,
4856 buffer_snapshot: Option<&'a BufferSnapshot>,
4857 ) -> Self {
4858 let mut highlights = None;
4859 if let Some((captures, highlight_maps)) = syntax {
4860 highlights = Some(BufferChunkHighlights {
4861 captures,
4862 next_capture: None,
4863 stack: Default::default(),
4864 highlight_maps,
4865 })
4866 }
4867
4868 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4869 let chunks = text.chunks_in_range(range.clone());
4870
4871 let mut this = BufferChunks {
4872 range,
4873 buffer_snapshot,
4874 chunks,
4875 diagnostic_endpoints,
4876 error_depth: 0,
4877 warning_depth: 0,
4878 information_depth: 0,
4879 hint_depth: 0,
4880 unnecessary_depth: 0,
4881 underline: true,
4882 highlights,
4883 };
4884 this.initialize_diagnostic_endpoints();
4885 this
4886 }
4887
4888 /// Seeks to the given byte offset in the buffer.
4889 pub fn seek(&mut self, range: Range<usize>) {
4890 let old_range = std::mem::replace(&mut self.range, range.clone());
4891 self.chunks.set_range(self.range.clone());
4892 if let Some(highlights) = self.highlights.as_mut() {
4893 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4894 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4895 highlights
4896 .stack
4897 .retain(|(end_offset, _)| *end_offset > range.start);
4898 if let Some(capture) = &highlights.next_capture
4899 && range.start >= capture.node.start_byte()
4900 {
4901 let next_capture_end = capture.node.end_byte();
4902 if range.start < next_capture_end {
4903 highlights.stack.push((
4904 next_capture_end,
4905 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4906 ));
4907 }
4908 highlights.next_capture.take();
4909 }
4910 } else if let Some(snapshot) = self.buffer_snapshot {
4911 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4912 *highlights = BufferChunkHighlights {
4913 captures,
4914 next_capture: None,
4915 stack: Default::default(),
4916 highlight_maps,
4917 };
4918 } else {
4919 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4920 // Seeking such BufferChunks is not supported.
4921 debug_assert!(
4922 false,
4923 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4924 );
4925 }
4926
4927 highlights.captures.set_byte_range(self.range.clone());
4928 self.initialize_diagnostic_endpoints();
4929 }
4930 }
4931
4932 fn initialize_diagnostic_endpoints(&mut self) {
4933 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4934 && let Some(buffer) = self.buffer_snapshot
4935 {
4936 let mut diagnostic_endpoints = Vec::new();
4937 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4938 diagnostic_endpoints.push(DiagnosticEndpoint {
4939 offset: entry.range.start,
4940 is_start: true,
4941 severity: entry.diagnostic.severity,
4942 is_unnecessary: entry.diagnostic.is_unnecessary,
4943 underline: entry.diagnostic.underline,
4944 });
4945 diagnostic_endpoints.push(DiagnosticEndpoint {
4946 offset: entry.range.end,
4947 is_start: false,
4948 severity: entry.diagnostic.severity,
4949 is_unnecessary: entry.diagnostic.is_unnecessary,
4950 underline: entry.diagnostic.underline,
4951 });
4952 }
4953 diagnostic_endpoints
4954 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4955 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4956 self.hint_depth = 0;
4957 self.error_depth = 0;
4958 self.warning_depth = 0;
4959 self.information_depth = 0;
4960 }
4961 }
4962
4963 /// The current byte offset in the buffer.
4964 pub fn offset(&self) -> usize {
4965 self.range.start
4966 }
4967
4968 pub fn range(&self) -> Range<usize> {
4969 self.range.clone()
4970 }
4971
4972 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4973 let depth = match endpoint.severity {
4974 DiagnosticSeverity::ERROR => &mut self.error_depth,
4975 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4976 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4977 DiagnosticSeverity::HINT => &mut self.hint_depth,
4978 _ => return,
4979 };
4980 if endpoint.is_start {
4981 *depth += 1;
4982 } else {
4983 *depth -= 1;
4984 }
4985
4986 if endpoint.is_unnecessary {
4987 if endpoint.is_start {
4988 self.unnecessary_depth += 1;
4989 } else {
4990 self.unnecessary_depth -= 1;
4991 }
4992 }
4993 }
4994
4995 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4996 if self.error_depth > 0 {
4997 Some(DiagnosticSeverity::ERROR)
4998 } else if self.warning_depth > 0 {
4999 Some(DiagnosticSeverity::WARNING)
5000 } else if self.information_depth > 0 {
5001 Some(DiagnosticSeverity::INFORMATION)
5002 } else if self.hint_depth > 0 {
5003 Some(DiagnosticSeverity::HINT)
5004 } else {
5005 None
5006 }
5007 }
5008
    /// Whether the current position lies inside at least one diagnostic
    /// flagged as unnecessary code.
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
5012}
5013
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    /// Yields the next run of text over which the syntax highlight and
    /// diagnostic state are uniform, advancing `self.range.start` past it.
    fn next(&mut self) -> Option<Self::Item> {
        // Offsets at which the current chunk must be cut so that highlight
        // or diagnostic state never changes in the middle of a chunk.
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Drop highlight scopes that ended at or before our position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already started onto the stack,
            // and remember where the first not-yet-started capture begins.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Temporarily move the endpoints out of `self` so that
        // `update_diagnostic_depths` (which needs `&mut self`) can be called
        // while we iterate them.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            // Consume all endpoints up to the current position, updating the
            // severity depth counters; note where the next endpoint begins.
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            // Cut the chunk at the next highlight or diagnostic transition,
            // whichever comes first.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                // The most recently started (innermost) capture both bounds
                // and styles this chunk.
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            // Position of the emitted slice within the underlying chunk.
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Re-align the per-byte tab/char bitmaps to the emitted slice.
            // `unbounded_shl` yields 0 when `bit_end` equals the full bitmap
            // width, so `wrapping_sub(1)` then produces an all-ones mask.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            // Once the underlying chunk is fully consumed, advance to the next.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
5110
5111impl operation_queue::Operation for Operation {
5112 fn lamport_timestamp(&self) -> clock::Lamport {
5113 match self {
5114 Operation::Buffer(_) => {
5115 unreachable!("buffer operations should never be deferred at this layer")
5116 }
5117 Operation::UpdateDiagnostics {
5118 lamport_timestamp, ..
5119 }
5120 | Operation::UpdateSelections {
5121 lamport_timestamp, ..
5122 }
5123 | Operation::UpdateCompletionTriggers {
5124 lamport_timestamp, ..
5125 }
5126 | Operation::UpdateLineEnding {
5127 lamport_timestamp, ..
5128 } => *lamport_timestamp,
5129 }
5130 }
5131}
5132
5133impl Default for Diagnostic {
5134 fn default() -> Self {
5135 Self {
5136 source: Default::default(),
5137 source_kind: DiagnosticSourceKind::Other,
5138 code: None,
5139 code_description: None,
5140 severity: DiagnosticSeverity::ERROR,
5141 message: Default::default(),
5142 markdown: None,
5143 group_id: 0,
5144 is_primary: false,
5145 is_disk_based: false,
5146 is_unnecessary: false,
5147 underline: true,
5148 data: None,
5149 }
5150 }
5151}
5152
5153impl IndentSize {
5154 /// Returns an [`IndentSize`] representing the given spaces.
5155 pub fn spaces(len: u32) -> Self {
5156 Self {
5157 len,
5158 kind: IndentKind::Space,
5159 }
5160 }
5161
5162 /// Returns an [`IndentSize`] representing a tab.
5163 pub fn tab() -> Self {
5164 Self {
5165 len: 1,
5166 kind: IndentKind::Tab,
5167 }
5168 }
5169
5170 /// An iterator over the characters represented by this [`IndentSize`].
5171 pub fn chars(&self) -> impl Iterator<Item = char> {
5172 iter::repeat(self.char()).take(self.len as usize)
5173 }
5174
5175 /// The character representation of this [`IndentSize`].
5176 pub fn char(&self) -> char {
5177 match self.kind {
5178 IndentKind::Space => ' ',
5179 IndentKind::Tab => '\t',
5180 }
5181 }
5182
5183 /// Consumes the current [`IndentSize`] and returns a new one that has
5184 /// been shrunk or enlarged by the given size along the given direction.
5185 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5186 match direction {
5187 Ordering::Less => {
5188 if self.kind == size.kind && self.len >= size.len {
5189 self.len -= size.len;
5190 }
5191 }
5192 Ordering::Equal => {}
5193 Ordering::Greater => {
5194 if self.len == 0 {
5195 self = size;
5196 } else if self.kind == size.kind {
5197 self.len += size.len;
5198 }
5199 }
5200 }
5201 self
5202 }
5203
5204 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5205 match self.kind {
5206 IndentKind::Space => self.len as usize,
5207 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5208 }
5209 }
5210}
5211
/// A minimal in-memory `File` implementation for tests: a relative path
/// under a named worktree root, optionally addressable on disk via
/// `local_root`.
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    // Worktree-relative path of the file.
    pub path: Arc<RelPath>,
    // Name of the worktree root this file belongs to.
    pub root_name: String,
    // When set, `LocalFile` behavior is enabled and `abs_path` resolves to
    // `local_root/root_name/path`.
    pub local_root: Option<PathBuf>,
}
5218
#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    /// The worktree-relative path of this file.
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    /// The path including the worktree root name, e.g. `root/dir/file.rs`.
    fn full_path(&self, _: &gpui::App) -> PathBuf {
        let mut full_path = PathBuf::from(self.root_name.clone());
        full_path.push(self.path.as_std_path());
        full_path
    }

    /// This file acts as a local file only when `local_root` was provided.
    fn as_local(&self) -> Option<&dyn LocalFile> {
        match self.local_root {
            Some(_) => Some(self),
            None => None,
        }
    }

    /// Not supported by `TestFile`; panics if called.
    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    /// The final path component, falling back to the root name for the
    /// worktree root itself.
    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        match self.path().file_name() {
            Some(file_name) => file_name,
            None => self.root_name.as_ref(),
        }
    }

    /// Test files always live in a synthetic worktree with id 0.
    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    /// Not supported by `TestFile`; panics if called.
    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    /// Test files are never treated as private.
    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}
5261
#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    /// The absolute path of this file on disk: `local_root/root_name/path`.
    ///
    /// # Panics
    /// Panics if this `TestFile` was constructed without a `local_root`.
    fn abs_path(&self, _cx: &App) -> PathBuf {
        // `as_deref` avoids the extra `PathBuf` clone that
        // `PathBuf::from(&PathBuf)` would perform, and `expect` states the
        // invariant instead of a bare `unwrap`.
        self.local_root
            .as_deref()
            .expect("TestFile::abs_path requires `local_root` to be set")
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    /// Not supported by `TestFile`; panics if called.
    fn load(
        &self,
        _cx: &App,
        _encoding: Encoding,
        _force: bool,
        _detect_utf16: bool,
        _buffer_encoding: Option<Arc<Encoding>>,
    ) -> Task<Result<String>> {
        unimplemented!()
    }

    /// Not supported by `TestFile`; panics if called.
    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}
5285
/// Coalesces a stream of `u32` values into ranges of consecutive integers,
/// capping each emitted range at `max_len` elements.
///
/// A value equal to the current range's `end` extends it (up to `max_len`);
/// any other value closes the current range and starts a new one. The final
/// in-progress range is emitted when the input is exhausted.
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut pending: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        for value in values.by_ref() {
            match &mut pending {
                // Extend the in-progress range while it stays contiguous
                // and under the length cap.
                Some(range) if value == range.end && range.len() < max_len => {
                    range.end += 1;
                }
                _ => {
                    // Start a new range; emit the finished one, if any.
                    let finished = pending.replace(value..value + 1);
                    if finished.is_some() {
                        return finished;
                    }
                }
            }
        }
        // Input exhausted: flush the last range (yields `None` thereafter).
        pending.take()
    })
}
5314
/// Classifies characters as word, whitespace, or punctuation, taking a
/// language's configured extra word characters into account.
#[derive(Default, Debug)]
pub struct CharClassifier {
    // Language scope that may supply additional word characters.
    scope: Option<LanguageScope>,
    // Selects which of the scope's character sets applies (completion
    // queries, linked edits, or the default word characters).
    scope_context: Option<CharScopeContext>,
    // When true, punctuation is classified as word characters.
    ignore_punctuation: bool,
}
5321
5322impl CharClassifier {
5323 pub fn new(scope: Option<LanguageScope>) -> Self {
5324 Self {
5325 scope,
5326 scope_context: None,
5327 ignore_punctuation: false,
5328 }
5329 }
5330
5331 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5332 Self {
5333 scope_context,
5334 ..self
5335 }
5336 }
5337
5338 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5339 Self {
5340 ignore_punctuation,
5341 ..self
5342 }
5343 }
5344
5345 pub fn is_whitespace(&self, c: char) -> bool {
5346 self.kind(c) == CharKind::Whitespace
5347 }
5348
5349 pub fn is_word(&self, c: char) -> bool {
5350 self.kind(c) == CharKind::Word
5351 }
5352
5353 pub fn is_punctuation(&self, c: char) -> bool {
5354 self.kind(c) == CharKind::Punctuation
5355 }
5356
5357 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5358 if c.is_alphanumeric() || c == '_' {
5359 return CharKind::Word;
5360 }
5361
5362 if let Some(scope) = &self.scope {
5363 let characters = match self.scope_context {
5364 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5365 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5366 None => scope.word_characters(),
5367 };
5368 if let Some(characters) = characters
5369 && characters.contains(&c)
5370 {
5371 return CharKind::Word;
5372 }
5373 }
5374
5375 if c.is_whitespace() {
5376 return CharKind::Whitespace;
5377 }
5378
5379 if ignore_punctuation {
5380 CharKind::Word
5381 } else {
5382 CharKind::Punctuation
5383 }
5384 }
5385
5386 pub fn kind(&self, c: char) -> CharKind {
5387 self.kind_with(c, self.ignore_punctuation)
5388 }
5389}
5390
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    // Absolute byte offset of the start of the line currently being scanned.
    let mut offset = 0;
    // Trailing-whitespace range of the last line of the previous chunk; used
    // to stitch together whitespace runs that straddle a chunk boundary.
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // If the chunk's first "line" is entirely whitespace, it
            // continues the same physical line as the previous chunk, so
            // extend that chunk's trailing range backwards rather than
            // starting a fresh one.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            // The previous line is now complete (a '\n' followed it), so its
            // trailing whitespace can be recorded.
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            // +1 accounts for the '\n' separator consumed by `split`.
            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The last split item has no trailing '\n'; undo the separator bump.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    // Flush the trailing whitespace of the rope's final line.
    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}