1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encodings::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: Arc<Encoding>,
131 pub observe_file_encoding: Option<gpui::Subscription>,
132}
133
134#[derive(Copy, Clone, Debug, PartialEq, Eq)]
135pub enum ParseStatus {
136 Idle,
137 Parsing,
138}
139
140struct BufferBranchState {
141 base_buffer: Entity<Buffer>,
142 merged_operations: Vec<Lamport>,
143}
144
145/// An immutable, cheaply cloneable representation of a fixed
146/// state of a buffer.
147pub struct BufferSnapshot {
148 pub text: text::BufferSnapshot,
149 pub syntax: SyntaxSnapshot,
150 file: Option<Arc<dyn File>>,
151 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
152 remote_selections: TreeMap<ReplicaId, SelectionSet>,
153 language: Option<Arc<Language>>,
154 non_text_state_update_count: usize,
155}
156
157/// The kind and amount of indentation in a particular line. For now,
158/// assumes that indentation is all the same character.
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
160pub struct IndentSize {
161 /// The number of bytes that comprise the indentation.
162 pub len: u32,
163 /// The kind of whitespace used for indentation.
164 pub kind: IndentKind,
165}
166
167/// A whitespace character that's used for indentation.
168#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
169pub enum IndentKind {
170 /// An ASCII space character.
171 #[default]
172 Space,
173 /// An ASCII tab character.
174 Tab,
175}
176
177/// The shape of a selection cursor.
178#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191impl From<settings::CursorShape> for CursorShape {
192 fn from(shape: settings::CursorShape) -> Self {
193 match shape {
194 settings::CursorShape::Bar => CursorShape::Bar,
195 settings::CursorShape::Block => CursorShape::Block,
196 settings::CursorShape::Underline => CursorShape::Underline,
197 settings::CursorShape::Hollow => CursorShape::Hollow,
198 }
199 }
200}
201
202#[derive(Clone, Debug)]
203struct SelectionSet {
204 line_mode: bool,
205 cursor_shape: CursorShape,
206 selections: Arc<[Selection<Anchor>]>,
207 lamport_timestamp: clock::Lamport,
208}
209
210/// A diagnostic associated with a certain range of a buffer.
211#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
212pub struct Diagnostic {
213 /// The name of the service that produced this diagnostic.
214 pub source: Option<String>,
215 /// A machine-readable code that identifies this diagnostic.
216 pub code: Option<NumberOrString>,
    /// A URI with more information about this diagnostic, if provided by the language server.
    pub code_description: Option<lsp::Uri>,
218 /// Whether this diagnostic is a hint, warning, or error.
219 pub severity: DiagnosticSeverity,
220 /// The human-readable message associated with this diagnostic.
221 pub message: String,
    /// The human-readable message in Markdown format, if available.
223 pub markdown: Option<String>,
224 /// An id that identifies the group to which this diagnostic belongs.
225 ///
226 /// When a language server produces a diagnostic with
227 /// one or more associated diagnostics, those diagnostics are all
228 /// assigned a single group ID.
229 pub group_id: usize,
230 /// Whether this diagnostic is the primary diagnostic for its group.
231 ///
232 /// In a given group, the primary diagnostic is the top-level diagnostic
233 /// returned by the language server. The non-primary diagnostics are the
234 /// associated diagnostics.
235 pub is_primary: bool,
236 /// Whether this diagnostic is considered to originate from an analysis of
237 /// files on disk, as opposed to any unsaved buffer contents. This is a
238 /// property of a given diagnostic source, and is configured for a given
239 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
240 /// for the language server.
241 pub is_disk_based: bool,
242 /// Whether this diagnostic marks unnecessary code.
243 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups by their source.
245 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// server when code actions are requested for this diagnostic.
247 pub data: Option<Value>,
248 /// Whether to underline the corresponding text range in the editor.
249 pub underline: bool,
250}
251
252#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
253pub enum DiagnosticSourceKind {
254 Pulled,
255 Pushed,
256 Other,
257}
258
259/// An operation used to synchronize this buffer with its other replicas.
260#[derive(Clone, Debug, PartialEq)]
261pub enum Operation {
262 /// A text operation.
263 Buffer(text::Operation),
264
265 /// An update to the buffer's diagnostics.
266 UpdateDiagnostics {
267 /// The id of the language server that produced the new diagnostics.
268 server_id: LanguageServerId,
269 /// The diagnostics.
270 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
271 /// The buffer's lamport timestamp.
272 lamport_timestamp: clock::Lamport,
273 },
274
275 /// An update to the most recent selections in this buffer.
276 UpdateSelections {
277 /// The selections.
278 selections: Arc<[Selection<Anchor>]>,
279 /// The buffer's lamport timestamp.
280 lamport_timestamp: clock::Lamport,
281 /// Whether the selections are in 'line mode'.
282 line_mode: bool,
283 /// The [`CursorShape`] associated with these selections.
284 cursor_shape: CursorShape,
285 },
286
287 /// An update to the characters that should trigger autocompletion
288 /// for this buffer.
289 UpdateCompletionTriggers {
290 /// The characters that trigger autocompletion.
291 triggers: Vec<String>,
292 /// The buffer's lamport timestamp.
293 lamport_timestamp: clock::Lamport,
294 /// The language server ID.
295 server_id: LanguageServerId,
296 },
297
298 /// An update to the line ending type of this buffer.
299 UpdateLineEnding {
300 /// The line ending type.
301 line_ending: LineEnding,
302 /// The buffer's lamport timestamp.
303 lamport_timestamp: clock::Lamport,
304 },
305}
306
307/// An event that occurs in a buffer.
308#[derive(Clone, Debug, PartialEq)]
309pub enum BufferEvent {
310 /// The buffer was changed in a way that must be
311 /// propagated to its other replicas.
312 Operation {
313 operation: Operation,
314 is_local: bool,
315 },
316 /// The buffer was edited.
317 Edited,
318 /// The buffer's `dirty` bit changed.
319 DirtyChanged,
320 /// The buffer was saved.
321 Saved,
322 /// The buffer's file was changed on disk.
323 FileHandleChanged,
324 /// The buffer was reloaded.
325 Reloaded,
    /// The buffer needs to be reloaded.
327 ReloadNeeded,
328 /// The buffer's language was changed.
329 LanguageChanged,
330 /// The buffer's syntax trees were updated.
331 Reparsed,
332 /// The buffer's diagnostics were updated.
333 DiagnosticsUpdated,
334 /// The buffer gained or lost editing capabilities.
335 CapabilityChanged,
336}
337
338/// The file associated with a buffer.
339pub trait File: Send + Sync + Any {
340 /// Returns the [`LocalFile`] associated with this file, if the
341 /// file is local.
342 fn as_local(&self) -> Option<&dyn LocalFile>;
343
344 /// Returns whether this file is local.
345 fn is_local(&self) -> bool {
346 self.as_local().is_some()
347 }
348
349 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
350 /// only available in some states, such as modification time.
351 fn disk_state(&self) -> DiskState;
352
353 /// Returns the path of this file relative to the worktree's root directory.
354 fn path(&self) -> &Arc<RelPath>;
355
356 /// Returns the path of this file relative to the worktree's parent directory (this means it
357 /// includes the name of the worktree's root folder).
358 fn full_path(&self, cx: &App) -> PathBuf;
359
360 /// Returns the path style of this file.
361 fn path_style(&self, cx: &App) -> PathStyle;
362
363 /// Returns the last component of this handle's absolute path. If this handle refers to the root
364 /// of its worktree, then this method will return the name of the worktree itself.
365 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
366
367 /// Returns the id of the worktree to which this file belongs.
368 ///
369 /// This is needed for looking up project-specific settings.
370 fn worktree_id(&self, cx: &App) -> WorktreeId;
371
372 /// Converts this file into a protobuf message.
373 fn to_proto(&self, cx: &App) -> rpc::proto::File;
374
    /// Returns whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;

    /// Returns the file's character encoding, if available.
    fn encoding(&self) -> Option<Arc<Encoding>> {
        unimplemented!()
    }
381}
382
383/// The file's storage status - whether it's stored (`Present`), and if so when it was last
384/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
385/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
386/// indicator for new files.
387#[derive(Copy, Clone, Debug, PartialEq)]
388pub enum DiskState {
389 /// File created in Zed that has not been saved.
390 New,
391 /// File present on the filesystem.
392 Present { mtime: MTime },
393 /// Deleted file that was previously present.
394 Deleted,
395}
396
397impl DiskState {
398 /// Returns the file's last known modification time on disk.
399 pub fn mtime(self) -> Option<MTime> {
400 match self {
401 DiskState::New => None,
402 DiskState::Present { mtime } => Some(mtime),
403 DiskState::Deleted => None,
404 }
405 }
406
407 pub fn exists(&self) -> bool {
408 match self {
409 DiskState::New => false,
410 DiskState::Present { .. } => true,
411 DiskState::Deleted => false,
412 }
413 }
414}
415
416/// The file associated with a buffer, in the case where the file is on the local disk.
417pub trait LocalFile: File {
    /// Returns the absolute path of this file.
419 fn abs_path(&self, cx: &App) -> PathBuf;
420
421 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
422 fn load(
423 &self,
424 cx: &App,
425 encoding: Encoding,
426 force: bool,
427 detect_utf16: bool,
428 buffer_encoding: Option<Arc<Encoding>>,
429 ) -> Task<Result<String>>;
430
431 /// Loads the file's contents from disk.
432 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
433}
434
435/// The auto-indent behavior associated with an editing operation.
436/// For some editing operations, each affected line of text has its
437/// indentation recomputed. For other operations, the entire block
438/// of edited text is adjusted uniformly.
439#[derive(Clone, Debug)]
440pub enum AutoindentMode {
441 /// Indent each line of inserted text.
442 EachLine,
443 /// Apply the same indentation adjustment to all of the lines
444 /// in a given insertion.
445 Block {
446 /// The original indentation column of the first line of each
447 /// insertion, if it has been copied.
448 ///
449 /// Knowing this makes it possible to preserve the relative indentation
450 /// of every line in the insertion from when it was copied.
451 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by `b - a` columns.
455 original_indent_columns: Vec<Option<u32>>,
456 },
457}
458
459#[derive(Clone)]
460struct AutoindentRequest {
461 before_edit: BufferSnapshot,
462 entries: Vec<AutoindentRequestEntry>,
463 is_block_mode: bool,
464 ignore_empty_lines: bool,
465}
466
467#[derive(Debug, Clone)]
468struct AutoindentRequestEntry {
469 /// A range of the buffer whose indentation should be adjusted.
470 range: Range<Anchor>,
471 /// Whether or not these lines should be considered brand new, for the
472 /// purpose of auto-indent. When text is not new, its indentation will
473 /// only be adjusted if the suggested indentation level has *changed*
474 /// since the edit was made.
475 first_line_is_new: bool,
476 indent_size: IndentSize,
477 original_indent_column: Option<u32>,
478}
479
480#[derive(Debug)]
481struct IndentSuggestion {
482 basis_row: u32,
483 delta: Ordering,
484 within_error: bool,
485}
486
487struct BufferChunkHighlights<'a> {
488 captures: SyntaxMapCaptures<'a>,
489 next_capture: Option<SyntaxMapCapture<'a>>,
490 stack: Vec<(usize, HighlightId)>,
491 highlight_maps: Vec<HighlightMap>,
492}
493
494/// An iterator that yields chunks of a buffer's text, along with their
495/// syntax highlights and diagnostic status.
496pub struct BufferChunks<'a> {
497 buffer_snapshot: Option<&'a BufferSnapshot>,
498 range: Range<usize>,
499 chunks: text::Chunks<'a>,
500 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
501 error_depth: usize,
502 warning_depth: usize,
503 information_depth: usize,
504 hint_depth: usize,
505 unnecessary_depth: usize,
506 underline: bool,
507 highlights: Option<BufferChunkHighlights<'a>>,
508}
509
510/// A chunk of a buffer's text, along with its syntax highlight and
511/// diagnostic status.
512#[derive(Clone, Debug, Default)]
513pub struct Chunk<'a> {
514 /// The text of the chunk.
515 pub text: &'a str,
516 /// The syntax highlighting style of the chunk.
517 pub syntax_highlight_id: Option<HighlightId>,
518 /// The highlight style that has been applied to this chunk in
519 /// the editor.
520 pub highlight_style: Option<HighlightStyle>,
521 /// The severity of diagnostic associated with this chunk, if any.
522 pub diagnostic_severity: Option<DiagnosticSeverity>,
523 /// A bitset of which characters are tabs in this string.
524 pub tabs: u128,
525 /// Bitmap of character indices in this chunk
526 pub chars: u128,
527 /// Whether this chunk of text is marked as unnecessary.
528 pub is_unnecessary: bool,
529 /// Whether this chunk of text was originally a tab character.
530 pub is_tab: bool,
531 /// Whether this chunk of text was originally an inlay.
532 pub is_inlay: bool,
533 /// Whether to underline the corresponding text range in the editor.
534 pub underline: bool,
535}
536
537/// A set of edits to a given version of a buffer, computed asynchronously.
538#[derive(Debug)]
539pub struct Diff {
540 pub base_version: clock::Global,
541 pub line_ending: LineEnding,
542 pub edits: Vec<(Range<usize>, Arc<str>)>,
543}
544
545#[derive(Debug, Clone, Copy)]
546pub(crate) struct DiagnosticEndpoint {
547 offset: usize,
548 is_start: bool,
549 underline: bool,
550 severity: DiagnosticSeverity,
551 is_unnecessary: bool,
552}
553
554/// A class of characters, used for characterizing a run of text.
555#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
556pub enum CharKind {
557 /// Whitespace.
558 Whitespace,
559 /// Punctuation.
560 Punctuation,
561 /// Word.
562 Word,
563}
564
565/// Context for character classification within a specific scope.
566#[derive(Copy, Clone, Eq, PartialEq, Debug)]
567pub enum CharScopeContext {
568 /// Character classification for completion queries.
569 ///
570 /// This context treats certain characters as word constituents that would
571 /// normally be considered punctuation, such as '-' in Tailwind classes
572 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
573 Completion,
574 /// Character classification for linked edits.
575 ///
576 /// This context handles characters that should be treated as part of
577 /// identifiers during linked editing operations, such as '.' in JSX
578 /// component names like `<Animated.View>`.
579 LinkedEdit,
580}
581
/// A runnable is the data about a buffer region that can be resolved into a task.
583pub struct Runnable {
584 pub tags: SmallVec<[RunnableTag; 1]>,
585 pub language: Arc<Language>,
586 pub buffer: BufferId,
587}
588
589#[derive(Default, Clone, Debug)]
590pub struct HighlightedText {
591 pub text: SharedString,
592 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
593}
594
595#[derive(Default, Debug)]
596struct HighlightedTextBuilder {
597 pub text: String,
598 highlights: Vec<(Range<usize>, HighlightStyle)>,
599}
600
601impl HighlightedText {
602 pub fn from_buffer_range<T: ToOffset>(
603 range: Range<T>,
604 snapshot: &text::BufferSnapshot,
605 syntax_snapshot: &SyntaxSnapshot,
606 override_style: Option<HighlightStyle>,
607 syntax_theme: &SyntaxTheme,
608 ) -> Self {
609 let mut highlighted_text = HighlightedTextBuilder::default();
610 highlighted_text.add_text_from_buffer_range(
611 range,
612 snapshot,
613 syntax_snapshot,
614 override_style,
615 syntax_theme,
616 );
617 highlighted_text.build()
618 }
619
620 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
621 gpui::StyledText::new(self.text.clone())
622 .with_default_highlights(default_style, self.highlights.iter().cloned())
623 }
624
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether more
    /// lines follow.
627 pub fn first_line_preview(self) -> (Self, bool) {
628 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
629 let first_line = &self.text[..newline_ix];
630
631 // Trim leading whitespace, unless an edit starts prior to it.
632 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
633 if let Some((first_highlight_range, _)) = self.highlights.first() {
634 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
635 }
636
637 let preview_text = &first_line[preview_start_ix..];
638 let preview_highlights = self
639 .highlights
640 .into_iter()
641 .skip_while(|(range, _)| range.end <= preview_start_ix)
642 .take_while(|(range, _)| range.start < newline_ix)
643 .filter_map(|(mut range, highlight)| {
644 range.start = range.start.saturating_sub(preview_start_ix);
645 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
646 if range.is_empty() {
647 None
648 } else {
649 Some((range, highlight))
650 }
651 });
652
653 let preview = Self {
654 text: SharedString::new(preview_text),
655 highlights: preview_highlights.collect(),
656 };
657
658 (preview, self.text.len() > newline_ix)
659 }
660}
661
662impl HighlightedTextBuilder {
663 pub fn build(self) -> HighlightedText {
664 HighlightedText {
665 text: self.text.into(),
666 highlights: self.highlights,
667 }
668 }
669
670 pub fn add_text_from_buffer_range<T: ToOffset>(
671 &mut self,
672 range: Range<T>,
673 snapshot: &text::BufferSnapshot,
674 syntax_snapshot: &SyntaxSnapshot,
675 override_style: Option<HighlightStyle>,
676 syntax_theme: &SyntaxTheme,
677 ) {
678 let range = range.to_offset(snapshot);
679 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
680 let start = self.text.len();
681 self.text.push_str(chunk.text);
682 let end = self.text.len();
683
684 if let Some(highlight_style) = chunk
685 .syntax_highlight_id
686 .and_then(|id| id.style(syntax_theme))
687 {
688 let highlight_style = override_style.map_or(highlight_style, |override_style| {
689 highlight_style.highlight(override_style)
690 });
691 self.highlights.push((start..end, highlight_style));
692 } else if let Some(override_style) = override_style {
693 self.highlights.push((start..end, override_style));
694 }
695 }
696 }
697
698 fn highlighted_chunks<'a>(
699 range: Range<usize>,
700 snapshot: &'a text::BufferSnapshot,
701 syntax_snapshot: &'a SyntaxSnapshot,
702 ) -> BufferChunks<'a> {
703 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
704 grammar
705 .highlights_config
706 .as_ref()
707 .map(|config| &config.query)
708 });
709
710 let highlight_maps = captures
711 .grammars()
712 .iter()
713 .map(|grammar| grammar.highlight_map())
714 .collect();
715
716 BufferChunks::new(
717 snapshot.as_rope(),
718 range,
719 Some((captures, highlight_maps)),
720 false,
721 None,
722 )
723 }
724}
725
726#[derive(Clone)]
727pub struct EditPreview {
728 old_snapshot: text::BufferSnapshot,
729 applied_edits_snapshot: text::BufferSnapshot,
730 syntax_snapshot: SyntaxSnapshot,
731}
732
733impl EditPreview {
734 pub fn highlight_edits(
735 &self,
736 current_snapshot: &BufferSnapshot,
737 edits: &[(Range<Anchor>, String)],
738 include_deletions: bool,
739 cx: &App,
740 ) -> HighlightedText {
741 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
742 return HighlightedText::default();
743 };
744
745 let mut highlighted_text = HighlightedTextBuilder::default();
746
747 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
748
749 let insertion_highlight_style = HighlightStyle {
750 background_color: Some(cx.theme().status().created_background),
751 ..Default::default()
752 };
753 let deletion_highlight_style = HighlightStyle {
754 background_color: Some(cx.theme().status().deleted_background),
755 ..Default::default()
756 };
757 let syntax_theme = cx.theme().syntax();
758
759 for (range, edit_text) in edits {
760 let edit_new_end_in_preview_snapshot = range
761 .end
762 .bias_right(&self.old_snapshot)
763 .to_offset(&self.applied_edits_snapshot);
764 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
765
766 let unchanged_range_in_preview_snapshot =
767 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
768 if !unchanged_range_in_preview_snapshot.is_empty() {
769 highlighted_text.add_text_from_buffer_range(
770 unchanged_range_in_preview_snapshot,
771 &self.applied_edits_snapshot,
772 &self.syntax_snapshot,
773 None,
774 syntax_theme,
775 );
776 }
777
778 let range_in_current_snapshot = range.to_offset(current_snapshot);
779 if include_deletions && !range_in_current_snapshot.is_empty() {
780 highlighted_text.add_text_from_buffer_range(
781 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
784 Some(deletion_highlight_style),
785 syntax_theme,
786 );
787 }
788
789 if !edit_text.is_empty() {
790 highlighted_text.add_text_from_buffer_range(
791 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
792 &self.applied_edits_snapshot,
793 &self.syntax_snapshot,
794 Some(insertion_highlight_style),
795 syntax_theme,
796 );
797 }
798
799 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
800 }
801
802 highlighted_text.add_text_from_buffer_range(
803 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
804 &self.applied_edits_snapshot,
805 &self.syntax_snapshot,
806 None,
807 syntax_theme,
808 );
809
810 highlighted_text.build()
811 }
812
813 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
814 let (first, _) = edits.first()?;
815 let (last, _) = edits.last()?;
816
817 let start = first
818 .start
819 .bias_left(&self.old_snapshot)
820 .to_point(&self.applied_edits_snapshot);
821 let end = last
822 .end
823 .bias_right(&self.old_snapshot)
824 .to_point(&self.applied_edits_snapshot);
825
826 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
827 let range = Point::new(start.row, 0)
828 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
829
830 Some(range.to_offset(&self.applied_edits_snapshot))
831 }
832}
833
834#[derive(Clone, Debug, PartialEq, Eq)]
835pub struct BracketMatch {
836 pub open_range: Range<usize>,
837 pub close_range: Range<usize>,
838 pub newline_only: bool,
839}
840
841impl Buffer {
842 /// Create a new buffer with the given base text.
843 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
844 Self::build(
845 TextBuffer::new(
846 ReplicaId::LOCAL,
847 cx.entity_id().as_non_zero_u64().into(),
848 base_text.into(),
849 &cx.background_executor(),
850 ),
851 None,
852 Capability::ReadWrite,
853 )
854 }
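    // Sketch of typical construction (assuming a gpui `App` or test context `cx`):
    //
    //     let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    //     let snapshot = buffer.read(cx).snapshot();
    //     assert_eq!(snapshot.text(), "fn main() {}");
    //
    // `snapshot.text()` refers to the underlying text buffer's full-text accessor
    // and is shown here for illustration only.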
855
    /// Replaces the text buffer. In contrast to `set_text`, this does not
    /// change the buffer's editing state.
858 pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
859 self.text = new;
860 self.saved_version = self.version.clone();
861 self.has_unsaved_edits.set((self.version.clone(), false));
862
863 self.was_changed();
864 cx.emit(BufferEvent::DirtyChanged);
865 cx.notify();
866 }
867
868 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
869 pub fn local_normalized(
870 base_text_normalized: Rope,
871 line_ending: LineEnding,
872 cx: &Context<Self>,
873 ) -> Self {
874 Self::build(
875 TextBuffer::new_normalized(
876 ReplicaId::LOCAL,
877 cx.entity_id().as_non_zero_u64().into(),
878 line_ending,
879 base_text_normalized,
880 ),
881 None,
882 Capability::ReadWrite,
883 )
884 }
885
886 /// Create a new buffer that is a replica of a remote buffer.
887 pub fn remote(
888 remote_id: BufferId,
889 replica_id: ReplicaId,
890 capability: Capability,
891 base_text: impl Into<String>,
892 cx: &BackgroundExecutor,
893 ) -> Self {
894 Self::build(
895 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
896 None,
897 capability,
898 )
899 }
900
901 /// Create a new buffer that is a replica of a remote buffer, populating its
902 /// state from the given protobuf message.
903 pub fn from_proto(
904 replica_id: ReplicaId,
905 capability: Capability,
906 message: proto::BufferState,
907 file: Option<Arc<dyn File>>,
908 cx: &BackgroundExecutor,
909 ) -> Result<Self> {
910 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
911 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
912 let mut this = Self::build(buffer, file, capability);
913 this.text.set_line_ending(proto::deserialize_line_ending(
914 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
915 ));
916 this.saved_version = proto::deserialize_version(&message.saved_version);
917 this.saved_mtime = message.saved_mtime.map(|time| time.into());
918 Ok(this)
919 }
920
921 /// Serialize the buffer's state to a protobuf message.
922 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
923 proto::BufferState {
924 id: self.remote_id().into(),
925 file: self.file.as_ref().map(|f| f.to_proto(cx)),
926 base_text: self.base_text().to_string(),
927 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
928 saved_version: proto::serialize_version(&self.saved_version),
929 saved_mtime: self.saved_mtime.map(|time| time.into()),
930 }
931 }
932
    /// Serializes all of the changes made to the buffer since the given version as protobuf operations.
934 pub fn serialize_ops(
935 &self,
936 since: Option<clock::Global>,
937 cx: &App,
938 ) -> Task<Vec<proto::Operation>> {
939 let mut operations = Vec::new();
940 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
941
942 operations.extend(self.remote_selections.iter().map(|(_, set)| {
943 proto::serialize_operation(&Operation::UpdateSelections {
944 selections: set.selections.clone(),
945 lamport_timestamp: set.lamport_timestamp,
946 line_mode: set.line_mode,
947 cursor_shape: set.cursor_shape,
948 })
949 }));
950
951 for (server_id, diagnostics) in &self.diagnostics {
952 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
953 lamport_timestamp: self.diagnostics_timestamp,
954 server_id: *server_id,
955 diagnostics: diagnostics.iter().cloned().collect(),
956 }));
957 }
958
959 for (server_id, completions) in &self.completion_triggers_per_language_server {
960 operations.push(proto::serialize_operation(
961 &Operation::UpdateCompletionTriggers {
962 triggers: completions.iter().cloned().collect(),
963 lamport_timestamp: self.completion_triggers_timestamp,
964 server_id: *server_id,
965 },
966 ));
967 }
968
969 let text_operations = self.text.operations().clone();
970 cx.background_spawn(async move {
971 let since = since.unwrap_or_default();
972 operations.extend(
973 text_operations
974 .iter()
975 .filter(|(_, op)| !since.observed(op.timestamp()))
976 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
977 );
978 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
979 operations
980 })
981 }
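    // Sketch of the replication flow these operations support (identifiers are
    // illustrative): the host serializes ops since a peer's last-seen version,
    // and the peer applies them to its replica via `apply_ops`.
    //
    //     let ops_task = host_buffer.read(cx).serialize_ops(Some(peer_version), cx);
    //     let ops = ops_task.await;
    //     replica_buffer.update(cx, |buffer, cx| {
    //         let ops = ops
    //             .into_iter()
    //             .map(|op| proto::deserialize_operation(op).unwrap());
    //         buffer.apply_ops(ops, cx);
    //     });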
982
983 /// Assign a language to the buffer, returning the buffer.
984 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
985 self.set_language(Some(language), cx);
986 self
987 }
988
989 /// Returns the [`Capability`] of this buffer.
990 pub fn capability(&self) -> Capability {
991 self.capability
992 }
993
994 /// Whether this buffer can only be read.
995 pub fn read_only(&self) -> bool {
996 self.capability == Capability::ReadOnly
997 }
998
999 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1000 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1001 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1002 let snapshot = buffer.snapshot();
1003 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1004 Self {
1005 saved_mtime,
1006 saved_version: buffer.version(),
1007 preview_version: buffer.version(),
1008 reload_task: None,
1009 transaction_depth: 0,
1010 was_dirty_before_starting_transaction: None,
1011 has_unsaved_edits: Cell::new((buffer.version(), false)),
1012 text: buffer,
1013 branch_state: None,
1014 file,
1015 capability,
1016 syntax_map,
1017 reparse: None,
1018 non_text_state_update_count: 0,
1019 sync_parse_timeout: Duration::from_millis(1),
1020 parse_status: watch::channel(ParseStatus::Idle),
1021 autoindent_requests: Default::default(),
1022 wait_for_autoindent_txs: Default::default(),
1023 pending_autoindent: Default::default(),
1024 language: None,
1025 remote_selections: Default::default(),
1026 diagnostics: Default::default(),
1027 diagnostics_timestamp: Lamport::MIN,
1028 completion_triggers: Default::default(),
1029 completion_triggers_per_language_server: Default::default(),
1030 completion_triggers_timestamp: Lamport::MIN,
1031 deferred_ops: OperationQueue::new(),
1032 has_conflict: false,
1033 change_bits: Default::default(),
1034 _subscriptions: Vec::new(),
1035 encoding: Arc::new(Encoding::new(encodings::UTF_8)),
1036 observe_file_encoding: None,
1037 }
1038 }
1039
1040 pub fn build_snapshot(
1041 text: Rope,
1042 language: Option<Arc<Language>>,
1043 language_registry: Option<Arc<LanguageRegistry>>,
1044 cx: &mut App,
1045 ) -> impl Future<Output = BufferSnapshot> + use<> {
1046 let entity_id = cx.reserve_entity::<Self>().entity_id();
1047 let buffer_id = entity_id.as_non_zero_u64().into();
1048 async move {
1049 let text =
1050 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1051 .snapshot();
1052 let mut syntax = SyntaxMap::new(&text).snapshot();
1053 if let Some(language) = language.clone() {
1054 let language_registry = language_registry.clone();
1055 syntax.reparse(&text, language_registry, language);
1056 }
1057 BufferSnapshot {
1058 text,
1059 syntax,
1060 file: None,
1061 diagnostics: Default::default(),
1062 remote_selections: Default::default(),
1063 language,
1064 non_text_state_update_count: 0,
1065 }
1066 }
1067 }
1068
1069 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1070 let entity_id = cx.reserve_entity::<Self>().entity_id();
1071 let buffer_id = entity_id.as_non_zero_u64().into();
1072 let text = TextBuffer::new_normalized(
1073 ReplicaId::LOCAL,
1074 buffer_id,
1075 Default::default(),
1076 Rope::new(),
1077 )
1078 .snapshot();
1079 let syntax = SyntaxMap::new(&text).snapshot();
1080 BufferSnapshot {
1081 text,
1082 syntax,
1083 file: None,
1084 diagnostics: Default::default(),
1085 remote_selections: Default::default(),
1086 language: None,
1087 non_text_state_update_count: 0,
1088 }
1089 }
1090
1091 #[cfg(any(test, feature = "test-support"))]
1092 pub fn build_snapshot_sync(
1093 text: Rope,
1094 language: Option<Arc<Language>>,
1095 language_registry: Option<Arc<LanguageRegistry>>,
1096 cx: &mut App,
1097 ) -> BufferSnapshot {
1098 let entity_id = cx.reserve_entity::<Self>().entity_id();
1099 let buffer_id = entity_id.as_non_zero_u64().into();
1100 let text =
1101 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1102 .snapshot();
1103 let mut syntax = SyntaxMap::new(&text).snapshot();
1104 if let Some(language) = language.clone() {
1105 syntax.reparse(&text, language_registry, language);
1106 }
1107 BufferSnapshot {
1108 text,
1109 syntax,
1110 file: None,
1111 diagnostics: Default::default(),
1112 remote_selections: Default::default(),
1113 language,
1114 non_text_state_update_count: 0,
1115 }
1116 }
1117
1118 /// Retrieve a snapshot of the buffer's current state. This is computationally
1119 /// cheap, and allows reading from the buffer on a background thread.
1120 pub fn snapshot(&self) -> BufferSnapshot {
1121 let text = self.text.snapshot();
1122 let mut syntax_map = self.syntax_map.lock();
1123 syntax_map.interpolate(&text);
1124 let syntax = syntax_map.snapshot();
1125
1126 BufferSnapshot {
1127 text,
1128 syntax,
1129 file: self.file.clone(),
1130 remote_selections: self.remote_selections.clone(),
1131 diagnostics: self.diagnostics.clone(),
1132 language: self.language.clone(),
1133 non_text_state_update_count: self.non_text_state_update_count,
1134 }
1135 }
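    // Because a snapshot is cheap to create and immutable, it can be moved onto
    // a background task for read-only work. A sketch (assuming a gpui context):
    //
    //     let snapshot = buffer.read(cx).snapshot();
    //     cx.background_spawn(async move {
    //         let line_count = snapshot.max_point().row + 1;
    //         // ... analyze `snapshot` without blocking the main thread ...
    //     })
    //     .detach();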
1136
    /// Creates a branch of this buffer: a writable copy that tracks this buffer as
    /// its base and whose edits can later be merged back via [`Self::merge_into_base`].
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1138 let this = cx.entity();
1139 cx.new(|cx| {
1140 let mut branch = Self {
1141 branch_state: Some(BufferBranchState {
1142 base_buffer: this.clone(),
1143 merged_operations: Default::default(),
1144 }),
1145 language: self.language.clone(),
1146 has_conflict: self.has_conflict,
1147 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1148 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1149 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1150 };
1151 if let Some(language_registry) = self.language_registry() {
1152 branch.set_language_registry(language_registry);
1153 }
1154
1155 // Reparse the branch buffer so that we get syntax highlighting immediately.
1156 branch.reparse(cx);
1157
1158 branch
1159 })
1160 }
1161
    /// Computes an [`EditPreview`] for the given edits on a background thread, which
    /// can then be used to render a syntax-highlighted preview of the change.
    pub fn preview_edits(
1163 &self,
1164 edits: Arc<[(Range<Anchor>, String)]>,
1165 cx: &App,
1166 ) -> Task<EditPreview> {
1167 let registry = self.language_registry();
1168 let language = self.language().cloned();
1169 let old_snapshot = self.text.snapshot();
1170 let mut branch_buffer = self.text.branch();
1171 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1172 let executor = cx.background_executor().clone();
1173 cx.background_spawn(async move {
1174 if !edits.is_empty() {
1175 if let Some(language) = language.clone() {
1176 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1177 }
1178
1179 branch_buffer.edit(edits.iter().cloned(), &executor);
1180 let snapshot = branch_buffer.snapshot();
1181 syntax_snapshot.interpolate(&snapshot);
1182
1183 if let Some(language) = language {
1184 syntax_snapshot.reparse(&snapshot, registry, language);
1185 }
1186 }
1187 EditPreview {
1188 old_snapshot,
1189 applied_edits_snapshot: branch_buffer.snapshot(),
1190 syntax_snapshot,
1191 }
1192 })
1193 }
1194
1195 /// Applies all of the changes in this buffer that intersect any of the
1196 /// given `ranges` to its base buffer.
1197 ///
1198 /// If `ranges` is empty, then all changes will be applied. This buffer must
1199 /// be a branch buffer to call this method.
1200 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1201 let Some(base_buffer) = self.base_buffer() else {
1202 debug_panic!("not a branch buffer");
1203 return;
1204 };
1205
1206 let mut ranges = if ranges.is_empty() {
1207 &[0..usize::MAX]
1208 } else {
1209 ranges.as_slice()
1210 }
1211 .iter()
1212 .peekable();
1213
1214 let mut edits = Vec::new();
1215 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1216 let mut is_included = false;
1217 while let Some(range) = ranges.peek() {
1218 if range.end < edit.new.start {
1219 ranges.next().unwrap();
1220 } else {
1221 if range.start <= edit.new.end {
1222 is_included = true;
1223 }
1224 break;
1225 }
1226 }
1227
1228 if is_included {
1229 edits.push((
1230 edit.old.clone(),
1231 self.text_for_range(edit.new.clone()).collect::<String>(),
1232 ));
1233 }
1234 }
1235
1236 let operation = base_buffer.update(cx, |base_buffer, cx| {
1237 // cx.emit(BufferEvent::DiffBaseChanged);
1238 base_buffer.edit(edits, None, cx)
1239 });
1240
1241 if let Some(operation) = operation
1242 && let Some(BufferBranchState {
1243 merged_operations, ..
1244 }) = &mut self.branch_state
1245 {
1246 merged_operations.push(operation);
1247 }
1248 }
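    // Sketch of the branch/merge workflow (assuming a gpui context; the edit
    // shown is illustrative):
    //
    //     let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    //     branch.update(cx, |branch, cx| {
    //         branch.edit([(0..0, "// prototype change\n")], None, cx);
    //         // An empty `ranges` vec merges every change back into the base.
    //         branch.merge_into_base(Vec::new(), cx);
    //     });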
1249
1250 fn on_base_buffer_event(
1251 &mut self,
1252 _: Entity<Buffer>,
1253 event: &BufferEvent,
1254 cx: &mut Context<Self>,
1255 ) {
1256 let BufferEvent::Operation { operation, .. } = event else {
1257 return;
1258 };
1259 let Some(BufferBranchState {
1260 merged_operations, ..
1261 }) = &mut self.branch_state
1262 else {
1263 return;
1264 };
1265
1266 let mut operation_to_undo = None;
1267 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1268 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1269 {
1270 merged_operations.remove(ix);
1271 operation_to_undo = Some(operation.timestamp);
1272 }
1273
1274 self.apply_ops([operation.clone()], cx);
1275
1276 if let Some(timestamp) = operation_to_undo {
1277 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1278 self.undo_operations(counts, cx);
1279 }
1280 }
1281
1282 #[cfg(test)]
1283 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1284 &self.text
1285 }
1286
1287 /// Retrieve a snapshot of the buffer's raw text, without any
1288 /// language-related state like the syntax tree or diagnostics.
1289 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1290 self.text.snapshot()
1291 }
1292
1293 /// The file associated with the buffer, if any.
1294 pub fn file(&self) -> Option<&Arc<dyn File>> {
1295 self.file.as_ref()
1296 }
1297
1298 /// The version of the buffer that was last saved or reloaded from disk.
1299 pub fn saved_version(&self) -> &clock::Global {
1300 &self.saved_version
1301 }
1302
1303 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1304 pub fn saved_mtime(&self) -> Option<MTime> {
1305 self.saved_mtime
1306 }
1307
1308 /// Assign a language to the buffer.
1309 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1310 self.non_text_state_update_count += 1;
1311 self.syntax_map.lock().clear(&self.text);
1312 self.language = language;
1313 self.was_changed();
1314 self.reparse(cx);
1315 cx.emit(BufferEvent::LanguageChanged);
1316 }
1317
1318 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1319 /// other languages if parts of the buffer are written in different languages.
1320 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1321 self.syntax_map
1322 .lock()
1323 .set_language_registry(language_registry);
1324 }
1325
1326 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1327 self.syntax_map.lock().language_registry()
1328 }
1329
1330 /// Assign the line ending type to the buffer.
1331 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1332 self.text.set_line_ending(line_ending);
1333
1334 let lamport_timestamp = self.text.lamport_clock.tick();
1335 self.send_operation(
1336 Operation::UpdateLineEnding {
1337 line_ending,
1338 lamport_timestamp,
1339 },
1340 true,
1341 cx,
1342 );
1343 }
1344
1345 /// Assign the buffer a new [`Capability`].
1346 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1347 if self.capability != capability {
1348 self.capability = capability;
1349 cx.emit(BufferEvent::CapabilityChanged)
1350 }
1351 }
1352
1353 /// This method is called to signal that the buffer has been saved.
1354 pub fn did_save(
1355 &mut self,
1356 version: clock::Global,
1357 mtime: Option<MTime>,
1358 cx: &mut Context<Self>,
1359 ) {
1360 self.saved_version = version.clone();
1361 self.has_unsaved_edits.set((version, false));
1362 self.has_conflict = false;
1363 self.saved_mtime = mtime;
1364 self.was_changed();
1365 cx.emit(BufferEvent::Saved);
1366 cx.notify();
1367 }
1368
1369 /// Reloads the contents of the buffer from disk.
1370 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1371 let (tx, rx) = futures::channel::oneshot::channel();
1372 let encoding = (*self.encoding).clone();
1373
1374 let buffer_encoding = self.encoding.clone();
1375
1376 let prev_version = self.text.version();
1377 self.reload_task = Some(cx.spawn(async move |this, cx| {
1378 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1379 let file = this.file.as_ref()?.as_local()?;
1380
1381 Some((file.disk_state().mtime(), {
1382 file.load(cx, encoding, false, true, Some(buffer_encoding))
1383 }))
1384 })?
1385 else {
1386 return Ok(());
1387 };
1388
1389 let new_text = new_text.await?;
1390 let diff = this
1391 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1392 .await;
1393 this.update(cx, |this, cx| {
1394 if this.version() == diff.base_version {
1395 this.finalize_last_transaction();
1396 this.apply_diff(diff, cx);
1397 tx.send(this.finalize_last_transaction().cloned()).ok();
1398 this.has_conflict = false;
1399 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1400 } else {
1401 if !diff.edits.is_empty()
1402 || this
1403 .edits_since::<usize>(&diff.base_version)
1404 .next()
1405 .is_some()
1406 {
1407 this.has_conflict = true;
1408 }
1409
1410 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1411 }
1412
1413 this.reload_task.take();
1414 })
1415 }));
1416 rx
1417 }
1418
1419 /// This method is called to signal that the buffer has been reloaded.
1420 pub fn did_reload(
1421 &mut self,
1422 version: clock::Global,
1423 line_ending: LineEnding,
1424 mtime: Option<MTime>,
1425 cx: &mut Context<Self>,
1426 ) {
1427 self.saved_version = version;
1428 self.has_unsaved_edits
1429 .set((self.saved_version.clone(), false));
1430 self.text.set_line_ending(line_ending);
1431 self.saved_mtime = mtime;
1432 cx.emit(BufferEvent::Reloaded);
1433 cx.notify();
1434 }
1435
    /// Replaces the [`File`] backing this buffer without emitting any events.
    pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
        self.file = Some(new_file);
    }

1439 /// Updates the [`File`] backing this buffer. This should be called when
1440 /// the file has changed or has been deleted.
1441 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1442 let was_dirty = self.is_dirty();
1443 let mut file_changed = false;
1444
1445 if let Some(old_file) = self.file.as_ref() {
1446 if new_file.path() != old_file.path() {
1447 file_changed = true;
1448 }
1449
1450 let old_state = old_file.disk_state();
1451 let new_state = new_file.disk_state();
1452 if old_state != new_state {
1453 file_changed = true;
1454 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1455 cx.emit(BufferEvent::ReloadNeeded)
1456 }
1457 }
1458 } else {
1459 file_changed = true;
1460 };
1461
1462 self.file = Some(new_file);
1463 if file_changed {
1464 self.was_changed();
1465 self.non_text_state_update_count += 1;
1466 if was_dirty != self.is_dirty() {
1467 cx.emit(BufferEvent::DirtyChanged);
1468 }
1469 cx.emit(BufferEvent::FileHandleChanged);
1470 cx.notify();
1471 }
1472 }
1473
1474 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1475 Some(self.branch_state.as_ref()?.base_buffer.clone())
1476 }
1477
1478 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1479 pub fn language(&self) -> Option<&Arc<Language>> {
1480 self.language.as_ref()
1481 }
1482
1483 /// Returns the [`Language`] at the given location.
1484 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1485 let offset = position.to_offset(self);
1486 let mut is_first = true;
1487 let start_anchor = self.anchor_before(offset);
1488 let end_anchor = self.anchor_after(offset);
1489 self.syntax_map
1490 .lock()
1491 .layers_for_range(offset..offset, &self.text, false)
1492 .filter(|layer| {
1493 if is_first {
1494 is_first = false;
1495 return true;
1496 }
1497
1498 layer
1499 .included_sub_ranges
1500 .map(|sub_ranges| {
1501 sub_ranges.iter().any(|sub_range| {
1502 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1503 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1504 !is_before_start && !is_after_end
1505 })
1506 })
1507 .unwrap_or(true)
1508 })
1509 .last()
1510 .map(|info| info.language.clone())
1511 .or_else(|| self.language.clone())
1512 }
1513
1514 /// Returns each [`Language`] for the active syntax layers at the given location.
1515 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1516 let offset = position.to_offset(self);
1517 let mut languages: Vec<Arc<Language>> = self
1518 .syntax_map
1519 .lock()
1520 .layers_for_range(offset..offset, &self.text, false)
1521 .map(|info| info.language.clone())
1522 .collect();
1523
1524 if languages.is_empty()
1525 && let Some(buffer_language) = self.language()
1526 {
1527 languages.push(buffer_language.clone());
1528 }
1529
1530 languages
1531 }
1532
1533 /// An integer version number that accounts for all updates besides
1534 /// the buffer's text itself (which is versioned via a version vector).
1535 pub fn non_text_state_update_count(&self) -> usize {
1536 self.non_text_state_update_count
1537 }
1538
1539 /// Whether the buffer is being parsed in the background.
1540 #[cfg(any(test, feature = "test-support"))]
1541 pub fn is_parsing(&self) -> bool {
1542 self.reparse.is_some()
1543 }
1544
1545 /// Indicates whether the buffer contains any regions that may be
1546 /// written in a language that hasn't been loaded yet.
1547 pub fn contains_unknown_injections(&self) -> bool {
1548 self.syntax_map.lock().contains_unknown_injections()
1549 }
1550
1551 #[cfg(any(test, feature = "test-support"))]
1552 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1553 self.sync_parse_timeout = timeout;
1554 }
1555
1556 /// Called after an edit to synchronize the buffer's main parse tree with
1557 /// the buffer's new underlying state.
1558 ///
1559 /// Locks the syntax map and interpolates the edits since the last reparse
1560 /// into the foreground syntax tree.
1561 ///
1562 /// Then takes a stable snapshot of the syntax map before unlocking it.
1563 /// The snapshot with the interpolated edits is sent to a background thread,
1564 /// where we ask Tree-sitter to perform an incremental parse.
1565 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms (the
    /// sync parse timeout) waiting for the parse to complete. If it finishes within
    /// that window, we proceed synchronously with the newly parsed tree.
    ///
    /// If we time out waiting for the parse, we return immediately with the
    /// interpolated tree still in the foreground and spawn a second task that waits
    /// for the background parse to complete. When it does, we call back into the
    /// main thread and assign the newly parsed state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1579 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1580 if self.reparse.is_some() {
1581 return;
1582 }
1583 let language = if let Some(language) = self.language.clone() {
1584 language
1585 } else {
1586 return;
1587 };
1588
1589 let text = self.text_snapshot();
1590 let parsed_version = self.version();
1591
1592 let mut syntax_map = self.syntax_map.lock();
1593 syntax_map.interpolate(&text);
1594 let language_registry = syntax_map.language_registry();
1595 let mut syntax_snapshot = syntax_map.snapshot();
1596 drop(syntax_map);
1597
1598 let parse_task = cx.background_spawn({
1599 let language = language.clone();
1600 let language_registry = language_registry.clone();
1601 async move {
1602 syntax_snapshot.reparse(&text, language_registry, language);
1603 syntax_snapshot
1604 }
1605 });
1606
1607 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1608 match cx
1609 .background_executor()
1610 .block_with_timeout(self.sync_parse_timeout, parse_task)
1611 {
1612 Ok(new_syntax_snapshot) => {
1613 self.did_finish_parsing(new_syntax_snapshot, cx);
1614 self.reparse = None;
1615 }
1616 Err(parse_task) => {
1617 // todo(lw): hot foreground spawn
1618 self.reparse = Some(cx.spawn(async move |this, cx| {
1619 let new_syntax_map = cx.background_spawn(parse_task).await;
1620 this.update(cx, move |this, cx| {
1621 let grammar_changed = || {
1622 this.language.as_ref().is_none_or(|current_language| {
1623 !Arc::ptr_eq(&language, current_language)
1624 })
1625 };
1626 let language_registry_changed = || {
1627 new_syntax_map.contains_unknown_injections()
1628 && language_registry.is_some_and(|registry| {
1629 registry.version() != new_syntax_map.language_registry_version()
1630 })
1631 };
1632 let parse_again = this.version.changed_since(&parsed_version)
1633 || language_registry_changed()
1634 || grammar_changed();
1635 this.did_finish_parsing(new_syntax_map, cx);
1636 this.reparse = None;
1637 if parse_again {
1638 this.reparse(cx);
1639 }
1640 })
1641 .ok();
1642 }));
1643 }
1644 }
1645 }
1646
1647 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1648 self.was_changed();
1649 self.non_text_state_update_count += 1;
1650 self.syntax_map.lock().did_parse(syntax_snapshot);
1651 self.request_autoindent(cx);
1652 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1653 cx.emit(BufferEvent::Reparsed);
1654 cx.notify();
1655 }
1656
1657 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1658 self.parse_status.1.clone()
1659 }
1660
1661 /// Assign to the buffer a set of diagnostics created by a given language server.
1662 pub fn update_diagnostics(
1663 &mut self,
1664 server_id: LanguageServerId,
1665 diagnostics: DiagnosticSet,
1666 cx: &mut Context<Self>,
1667 ) {
1668 let lamport_timestamp = self.text.lamport_clock.tick();
1669 let op = Operation::UpdateDiagnostics {
1670 server_id,
1671 diagnostics: diagnostics.iter().cloned().collect(),
1672 lamport_timestamp,
1673 };
1674
1675 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1676 self.send_operation(op, true, cx);
1677 }
1678
    /// Returns the diagnostic entries for this buffer, optionally restricted to
    /// those produced by the given language server.
    pub fn buffer_diagnostics(
1680 &self,
1681 for_server: Option<LanguageServerId>,
1682 ) -> Vec<&DiagnosticEntry<Anchor>> {
1683 match for_server {
1684 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1685 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1686 Err(_) => Vec::new(),
1687 },
1688 None => self
1689 .diagnostics
1690 .iter()
1691 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1692 .collect(),
1693 }
1694 }
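    // Sketch of querying diagnostics (the server id is hypothetical):
    //
    //     let server_id = LanguageServerId(0);
    //     // Entries from one server only:
    //     let from_one = buffer.buffer_diagnostics(Some(server_id));
    //     // Entries from every server that has reported diagnostics:
    //     let from_all = buffer.buffer_diagnostics(None);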
1695
1696 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1697 if let Some(indent_sizes) = self.compute_autoindents() {
1698 let indent_sizes = cx.background_spawn(indent_sizes);
1699 match cx
1700 .background_executor()
1701 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1702 {
1703 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1704 Err(indent_sizes) => {
1705 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1706 let indent_sizes = indent_sizes.await;
1707 this.update(cx, |this, cx| {
1708 this.apply_autoindents(indent_sizes, cx);
1709 })
1710 .ok();
1711 }));
1712 }
1713 }
1714 } else {
1715 self.autoindent_requests.clear();
1716 for tx in self.wait_for_autoindent_txs.drain(..) {
1717 tx.send(()).ok();
1718 }
1719 }
1720 }
1721
1722 fn compute_autoindents(
1723 &self,
1724 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1725 let max_rows_between_yields = 100;
1726 let snapshot = self.snapshot();
1727 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1728 return None;
1729 }
1730
1731 let autoindent_requests = self.autoindent_requests.clone();
1732 Some(async move {
1733 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1734 for request in autoindent_requests {
1735 // Resolve each edited range to its row in the current buffer and in the
1736 // buffer before this batch of edits.
1737 let mut row_ranges = Vec::new();
1738 let mut old_to_new_rows = BTreeMap::new();
1739 let mut language_indent_sizes_by_new_row = Vec::new();
1740 for entry in &request.entries {
1741 let position = entry.range.start;
1742 let new_row = position.to_point(&snapshot).row;
1743 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1744 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1745
1746 if !entry.first_line_is_new {
1747 let old_row = position.to_point(&request.before_edit).row;
1748 old_to_new_rows.insert(old_row, new_row);
1749 }
1750 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1751 }
1752
1753 // Build a map containing the suggested indentation for each of the edited lines
1754 // with respect to the state of the buffer before these edits. This map is keyed
1755 // by the rows for these lines in the current state of the buffer.
1756 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1757 let old_edited_ranges =
1758 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1759 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1760 let mut language_indent_size = IndentSize::default();
1761 for old_edited_range in old_edited_ranges {
1762 let suggestions = request
1763 .before_edit
1764 .suggest_autoindents(old_edited_range.clone())
1765 .into_iter()
1766 .flatten();
1767 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1768 if let Some(suggestion) = suggestion {
1769 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1770
1771 // Find the indent size based on the language for this row.
1772 while let Some((row, size)) = language_indent_sizes.peek() {
1773 if *row > new_row {
1774 break;
1775 }
1776 language_indent_size = *size;
1777 language_indent_sizes.next();
1778 }
1779
1780 let suggested_indent = old_to_new_rows
1781 .get(&suggestion.basis_row)
1782 .and_then(|from_row| {
1783 Some(old_suggestions.get(from_row).copied()?.0)
1784 })
1785 .unwrap_or_else(|| {
1786 request
1787 .before_edit
1788 .indent_size_for_line(suggestion.basis_row)
1789 })
1790 .with_delta(suggestion.delta, language_indent_size);
1791 old_suggestions
1792 .insert(new_row, (suggested_indent, suggestion.within_error));
1793 }
1794 }
1795 yield_now().await;
1796 }
1797
1798 // Compute new suggestions for each line, but only include them in the result
1799 // if they differ from the old suggestion for that line.
1800 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1801 let mut language_indent_size = IndentSize::default();
1802 for (row_range, original_indent_column) in row_ranges {
1803 let new_edited_row_range = if request.is_block_mode {
1804 row_range.start..row_range.start + 1
1805 } else {
1806 row_range.clone()
1807 };
1808
1809 let suggestions = snapshot
1810 .suggest_autoindents(new_edited_row_range.clone())
1811 .into_iter()
1812 .flatten();
1813 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1814 if let Some(suggestion) = suggestion {
1815 // Find the indent size based on the language for this row.
1816 while let Some((row, size)) = language_indent_sizes.peek() {
1817 if *row > new_row {
1818 break;
1819 }
1820 language_indent_size = *size;
1821 language_indent_sizes.next();
1822 }
1823
1824 let suggested_indent = indent_sizes
1825 .get(&suggestion.basis_row)
1826 .copied()
1827 .map(|e| e.0)
1828 .unwrap_or_else(|| {
1829 snapshot.indent_size_for_line(suggestion.basis_row)
1830 })
1831 .with_delta(suggestion.delta, language_indent_size);
1832
1833 if old_suggestions.get(&new_row).is_none_or(
1834 |(old_indentation, was_within_error)| {
1835 suggested_indent != *old_indentation
1836 && (!suggestion.within_error || *was_within_error)
1837 },
1838 ) {
1839 indent_sizes.insert(
1840 new_row,
1841 (suggested_indent, request.ignore_empty_lines),
1842 );
1843 }
1844 }
1845 }
1846
1847 if let (true, Some(original_indent_column)) =
1848 (request.is_block_mode, original_indent_column)
1849 {
1850 let new_indent =
1851 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1852 *indent
1853 } else {
1854 snapshot.indent_size_for_line(row_range.start)
1855 };
1856 let delta = new_indent.len as i64 - original_indent_column as i64;
1857 if delta != 0 {
1858 for row in row_range.skip(1) {
1859 indent_sizes.entry(row).or_insert_with(|| {
1860 let mut size = snapshot.indent_size_for_line(row);
1861 if size.kind == new_indent.kind {
1862 match delta.cmp(&0) {
1863 Ordering::Greater => size.len += delta as u32,
1864 Ordering::Less => {
1865 size.len = size.len.saturating_sub(-delta as u32)
1866 }
1867 Ordering::Equal => {}
1868 }
1869 }
1870 (size, request.ignore_empty_lines)
1871 });
1872 }
1873 }
1874 }
1875
1876 yield_now().await;
1877 }
1878 }
1879
1880 indent_sizes
1881 .into_iter()
1882 .filter_map(|(row, (indent, ignore_empty_lines))| {
1883 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1884 None
1885 } else {
1886 Some((row, indent))
1887 }
1888 })
1889 .collect()
1890 })
1891 }
1892
1893 fn apply_autoindents(
1894 &mut self,
1895 indent_sizes: BTreeMap<u32, IndentSize>,
1896 cx: &mut Context<Self>,
1897 ) {
1898 self.autoindent_requests.clear();
1899 for tx in self.wait_for_autoindent_txs.drain(..) {
1900 tx.send(()).ok();
1901 }
1902
1903 let edits: Vec<_> = indent_sizes
1904 .into_iter()
1905 .filter_map(|(row, indent_size)| {
1906 let current_size = indent_size_for_line(self, row);
1907 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1908 })
1909 .collect();
1910
1911 let preserve_preview = self.preserve_preview();
1912 self.edit(edits, None, cx);
1913 if preserve_preview {
1914 self.refresh_preview();
1915 }
1916 }
1917
1918 /// Create a minimal edit that will cause the given row to be indented
1919 /// with the given size. After applying this edit, the length of the line
1920 /// will always be at least `new_size.len`.
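    ///
    /// A minimal illustrative sketch (not a doctest; uses `IndentSize::spaces` as
    /// defined elsewhere in this crate):
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces only inserts the missing 2 spaces.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```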
1921 pub fn edit_for_indent_size_adjustment(
1922 row: u32,
1923 current_size: IndentSize,
1924 new_size: IndentSize,
1925 ) -> Option<(Range<Point>, String)> {
1926 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1928 Ordering::Greater => {
1929 let point = Point::new(row, 0);
1930 Some((
1931 point..point,
1932 iter::repeat(new_size.char())
1933 .take((new_size.len - current_size.len) as usize)
1934 .collect::<String>(),
1935 ))
1936 }
1937
1938 Ordering::Less => Some((
1939 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1940 String::new(),
1941 )),
1942
1943 Ordering::Equal => None,
1944 }
1945 } else {
1946 Some((
1947 Point::new(row, 0)..Point::new(row, current_size.len),
1948 iter::repeat(new_size.char())
1949 .take(new_size.len as usize)
1950 .collect::<String>(),
1951 ))
1952 }
1953 }
1954
1955 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1956 /// and the given new text.
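    ///
    /// An illustrative sketch of the usual compute-then-apply flow from within a
    /// `Buffer` method (not a doctest; mirrors the spawn pattern used elsewhere in
    /// this file, with `new_text` as a hypothetical `String`):
    /// ```ignore
    /// let task = self.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = task.await;
    ///     this.update(cx, |this, cx| {
    ///         // Conflicting hunks are dropped if the buffer changed in the meantime.
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```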
1957 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1958 let old_text = self.as_rope().clone();
1959 let base_version = self.version();
1960 cx.background_executor()
1961 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1962 let old_text = old_text.to_string();
1963 let line_ending = LineEnding::detect(&new_text);
1964 LineEnding::normalize(&mut new_text);
1965 let edits = text_diff(&old_text, &new_text);
1966 Diff {
1967 base_version,
1968 line_ending,
1969 edits,
1970 }
1971 })
1972 }
1973
1974 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1976 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1977 let old_text = self.as_rope().clone();
1978 let line_ending = self.line_ending();
1979 let base_version = self.version();
1980 cx.background_spawn(async move {
1981 let ranges = trailing_whitespace_ranges(&old_text);
1982 let empty = Arc::<str>::from("");
1983 Diff {
1984 base_version,
1985 line_ending,
1986 edits: ranges
1987 .into_iter()
1988 .map(|range| (range, empty.clone()))
1989 .collect(),
1990 }
1991 })
1992 }
1993
1994 /// Ensures that the buffer ends with a single newline character, and
1995 /// no other whitespace. Skips if the buffer is empty.
1996 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1997 let len = self.len();
1998 if len == 0 {
1999 return;
2000 }
2001 let mut offset = len;
2002 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2003 let non_whitespace_len = chunk
2004 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2005 .len();
2006 offset -= chunk.len();
2007 offset += non_whitespace_len;
2008 if non_whitespace_len != 0 {
2009 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2010 return;
2011 }
2012 break;
2013 }
2014 }
2015 self.edit([(offset..len, "\n")], None, cx);
2016 }
2017
2018 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2019 /// calculated, then adjust the diff to account for those changes, and discard any
2020 /// parts of the diff that conflict with those changes.
2021 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2022 let snapshot = self.snapshot();
2023 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2024 let mut delta = 0;
2025 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2026 while let Some(edit_since) = edits_since.peek() {
2027 // If the edit occurs after a diff hunk, then it does not
2028 // affect that hunk.
2029 if edit_since.old.start > range.end {
2030 break;
2031 }
2032 // If the edit precedes the diff hunk, then adjust the hunk
2033 // to reflect the edit.
2034 else if edit_since.old.end < range.start {
2035 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2036 edits_since.next();
2037 }
2038 // If the edit intersects a diff hunk, then discard that hunk.
2039 else {
2040 return None;
2041 }
2042 }
2043
2044 let start = (range.start as i64 + delta) as usize;
2045 let end = (range.end as i64 + delta) as usize;
2046 Some((start..end, new_text))
2047 });
2048
2049 self.start_transaction();
2050 self.text.set_line_ending(diff.line_ending);
2051 self.edit(adjusted_edits, None, cx);
2052 self.end_transaction(cx)
2053 }
2054
2055 pub fn has_unsaved_edits(&self) -> bool {
2056 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2057
2058 if last_version == self.version {
2059 self.has_unsaved_edits
2060 .set((last_version, has_unsaved_edits));
2061 return has_unsaved_edits;
2062 }
2063
2064 let has_edits = self.has_edits_since(&self.saved_version);
2065 self.has_unsaved_edits
2066 .set((self.version.clone(), has_edits));
2067 has_edits
2068 }
2069
2070 /// Checks if the buffer has unsaved changes.
2071 pub fn is_dirty(&self) -> bool {
2072 if self.capability == Capability::ReadOnly {
2073 return false;
2074 }
2075 if self.has_conflict {
2076 return true;
2077 }
2078 match self.file.as_ref().map(|f| f.disk_state()) {
2079 Some(DiskState::New) | Some(DiskState::Deleted) => {
2080 !self.is_empty() && self.has_unsaved_edits()
2081 }
2082 _ => self.has_unsaved_edits(),
2083 }
2084 }
2085
2086 /// Checks if the buffer and its file have both changed since the buffer
2087 /// was last saved or reloaded.
2088 pub fn has_conflict(&self) -> bool {
2089 if self.has_conflict {
2090 return true;
2091 }
2092 let Some(file) = self.file.as_ref() else {
2093 return false;
2094 };
2095 match file.disk_state() {
2096 DiskState::New => false,
2097 DiskState::Present { mtime } => match self.saved_mtime {
2098 Some(saved_mtime) => {
2099 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2100 }
2101 None => true,
2102 },
2103 DiskState::Deleted => false,
2104 }
2105 }
2106
2107 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2108 pub fn subscribe(&mut self) -> Subscription {
2109 self.text.subscribe()
2110 }
2111
2112 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2113 ///
2114 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
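    ///
    /// A minimal illustrative sketch (not a doctest; the caller keeps the strong
    /// `Rc` and hands the buffer a weak reference):
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits, check and reset the bit without waiting for events:
    /// if changed.get() {
    ///     changed.set(false);
    /// }
    /// ```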
2116 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2117 if let Err(ix) = self
2118 .change_bits
2119 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2120 {
2121 self.change_bits.insert(ix, bit);
2122 }
2123 }
2124
2125 /// Set the change bit for all "listeners".
2126 fn was_changed(&mut self) {
2127 self.change_bits.retain(|change_bit| {
2128 change_bit
2129 .upgrade()
2130 .inspect(|bit| {
2131 _ = bit.replace(true);
2132 })
2133 .is_some()
2134 });
2135 }
2136
2137 /// Starts a transaction, if one is not already in-progress. When undoing or
2138 /// redoing edits, all of the edits performed within a transaction are undone
2139 /// or redone together.
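    ///
    /// A minimal illustrative sketch (not a doctest; `buffer` and `cx` are hypothetical
    /// bindings) showing how to group edits so they undo together:
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```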
2140 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2141 self.start_transaction_at(Instant::now())
2142 }
2143
2144 /// Starts a transaction, providing the current time. Subsequent transactions
2145 /// that occur within a short period of time will be grouped together. This
2146 /// is controlled by the buffer's undo grouping duration.
2147 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2148 self.transaction_depth += 1;
2149 if self.was_dirty_before_starting_transaction.is_none() {
2150 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2151 }
2152 self.text.start_transaction_at(now)
2153 }
2154
2155 /// Terminates the current transaction, if this is the outermost transaction.
2156 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2157 self.end_transaction_at(Instant::now(), cx)
2158 }
2159
2160 /// Terminates the current transaction, providing the current time. Subsequent transactions
2161 /// that occur within a short period of time will be grouped together. This
2162 /// is controlled by the buffer's undo grouping duration.
2163 pub fn end_transaction_at(
2164 &mut self,
2165 now: Instant,
2166 cx: &mut Context<Self>,
2167 ) -> Option<TransactionId> {
2168 assert!(self.transaction_depth > 0);
2169 self.transaction_depth -= 1;
2170 let was_dirty = if self.transaction_depth == 0 {
2171 self.was_dirty_before_starting_transaction.take().unwrap()
2172 } else {
2173 false
2174 };
2175 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2176 self.did_edit(&start_version, was_dirty, cx);
2177 Some(transaction_id)
2178 } else {
2179 None
2180 }
2181 }
2182
2183 /// Manually add a transaction to the buffer's undo history.
2184 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2185 self.text.push_transaction(transaction, now);
2186 }
2187
2188 /// Differs from `push_transaction` in that it does not clear the redo
2189 /// stack. Intended to be used to create a parent transaction to merge
2190 /// potential child transactions into.
2191 ///
2192 /// The caller is responsible for removing it from the undo history using
2193 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2194 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and then merge the resulting transactions into
    /// the transaction created by this method.
2199 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2200 self.text.push_empty_transaction(now)
2201 }
2202
2203 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2205 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2206 self.text.finalize_last_transaction()
2207 }
2208
2209 /// Manually group all changes since a given transaction.
2210 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2211 self.text.group_until_transaction(transaction_id);
2212 }
2213
    /// Manually remove a transaction from the buffer's undo history.
2215 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2216 self.text.forget_transaction(transaction_id)
2217 }
2218
    /// Retrieve a transaction from the buffer's undo history.
2220 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2221 self.text.get_transaction(transaction_id)
2222 }
2223
2224 /// Manually merge two transactions in the buffer's undo history.
2225 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2226 self.text.merge_transactions(transaction, destination);
2227 }
2228
2229 /// Waits for the buffer to receive operations with the given timestamps.
2230 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2231 &mut self,
2232 edit_ids: It,
2233 ) -> impl Future<Output = Result<()>> + use<It> {
2234 self.text.wait_for_edits(edit_ids)
2235 }
2236
2237 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2238 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2239 &mut self,
2240 anchors: It,
2241 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2242 self.text.wait_for_anchors(anchors)
2243 }
2244
2245 /// Waits for the buffer to receive operations up to the given version.
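    ///
    /// An illustrative sketch (not a doctest; assumes an async context where `buffer`
    /// is an `Entity<Buffer>` and `version` is a `clock::Global` received remotely):
    /// ```ignore
    /// let wait = buffer.update(cx, |buffer, _| buffer.wait_for_version(version))?;
    /// wait.await?;
    /// // The buffer now contains all operations up to `version`.
    /// ```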
2246 pub fn wait_for_version(
2247 &mut self,
2248 version: clock::Global,
2249 ) -> impl Future<Output = Result<()>> + use<> {
2250 self.text.wait_for_version(version)
2251 }
2252
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2255 pub fn give_up_waiting(&mut self) {
2256 self.text.give_up_waiting();
2257 }
2258
2259 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2260 let mut rx = None;
2261 if !self.autoindent_requests.is_empty() {
2262 let channel = oneshot::channel();
2263 self.wait_for_autoindent_txs.push(channel.0);
2264 rx = Some(channel.1);
2265 }
2266 rx
2267 }
2268
2269 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2270 pub fn set_active_selections(
2271 &mut self,
2272 selections: Arc<[Selection<Anchor>]>,
2273 line_mode: bool,
2274 cursor_shape: CursorShape,
2275 cx: &mut Context<Self>,
2276 ) {
2277 let lamport_timestamp = self.text.lamport_clock.tick();
2278 self.remote_selections.insert(
2279 self.text.replica_id(),
2280 SelectionSet {
2281 selections: selections.clone(),
2282 lamport_timestamp,
2283 line_mode,
2284 cursor_shape,
2285 },
2286 );
2287 self.send_operation(
2288 Operation::UpdateSelections {
2289 selections,
2290 line_mode,
2291 lamport_timestamp,
2292 cursor_shape,
2293 },
2294 true,
2295 cx,
2296 );
2297 self.non_text_state_update_count += 1;
2298 cx.notify();
2299 }
2300
2301 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2302 /// this replica.
2303 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2304 if self
2305 .remote_selections
2306 .get(&self.text.replica_id())
2307 .is_none_or(|set| !set.selections.is_empty())
2308 {
2309 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2310 }
2311 }
2312
2313 pub fn set_agent_selections(
2314 &mut self,
2315 selections: Arc<[Selection<Anchor>]>,
2316 line_mode: bool,
2317 cursor_shape: CursorShape,
2318 cx: &mut Context<Self>,
2319 ) {
2320 let lamport_timestamp = self.text.lamport_clock.tick();
2321 self.remote_selections.insert(
2322 ReplicaId::AGENT,
2323 SelectionSet {
2324 selections,
2325 lamport_timestamp,
2326 line_mode,
2327 cursor_shape,
2328 },
2329 );
2330 self.non_text_state_update_count += 1;
2331 cx.notify();
2332 }
2333
2334 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2335 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2336 }
2337
2338 /// Replaces the buffer's entire text.
2339 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2340 where
2341 T: Into<Arc<str>>,
2342 {
2343 self.autoindent_requests.clear();
2344 self.edit([(0..self.len(), text)], None, cx)
2345 }
2346
2347 /// Appends the given text to the end of the buffer.
2348 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2349 where
2350 T: Into<Arc<str>>,
2351 {
2352 self.edit([(self.len()..self.len(), text)], None, cx)
2353 }
2354
2355 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2356 /// delete, and a string of text to insert at that location.
2357 ///
2358 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2359 /// request for the edited ranges, which will be processed when the buffer finishes
2360 /// parsing.
2361 ///
    /// Parsing takes place at the end of a transaction, and may happen synchronously
    /// or asynchronously, depending on the changes.
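    ///
    /// A minimal illustrative sketch (not a doctest; `buffer` and `cx` are hypothetical
    /// bindings):
    /// ```ignore
    /// // Replace the first four columns of row 1 and re-indent each affected line.
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 4), "if all_good {\n    proceed();\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```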
2364 pub fn edit<I, S, T>(
2365 &mut self,
2366 edits_iter: I,
2367 autoindent_mode: Option<AutoindentMode>,
2368 cx: &mut Context<Self>,
2369 ) -> Option<clock::Lamport>
2370 where
2371 I: IntoIterator<Item = (Range<S>, T)>,
2372 S: ToOffset,
2373 T: Into<Arc<str>>,
2374 {
2375 // Skip invalid edits and coalesce contiguous ones.
2376 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2377
2378 for (range, new_text) in edits_iter {
2379 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2380
2381 if range.start > range.end {
2382 mem::swap(&mut range.start, &mut range.end);
2383 }
2384 let new_text = new_text.into();
2385 if !new_text.is_empty() || !range.is_empty() {
2386 if let Some((prev_range, prev_text)) = edits.last_mut()
2387 && prev_range.end >= range.start
2388 {
2389 prev_range.end = cmp::max(prev_range.end, range.end);
2390 *prev_text = format!("{prev_text}{new_text}").into();
2391 } else {
2392 edits.push((range, new_text));
2393 }
2394 }
2395 }
2396 if edits.is_empty() {
2397 return None;
2398 }
2399
2400 self.start_transaction();
2401 self.pending_autoindent.take();
2402 let autoindent_request = autoindent_mode
2403 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2404
2405 let edit_operation = self
2406 .text
2407 .edit(edits.iter().cloned(), cx.background_executor());
2408 let edit_id = edit_operation.timestamp();
2409
2410 if let Some((before_edit, mode)) = autoindent_request {
2411 let mut delta = 0isize;
2412 let mut previous_setting = None;
2413 let entries: Vec<_> = edits
2414 .into_iter()
2415 .enumerate()
2416 .zip(&edit_operation.as_edit().unwrap().new_text)
2417 .filter(|((_, (range, _)), _)| {
2418 let language = before_edit.language_at(range.start);
2419 let language_id = language.map(|l| l.id());
2420 if let Some((cached_language_id, auto_indent)) = previous_setting
2421 && cached_language_id == language_id
2422 {
2423 auto_indent
2424 } else {
2425 // The auto-indent setting is not present in editorconfigs, hence
2426 // we can avoid passing the file here.
2427 let auto_indent =
2428 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2429 previous_setting = Some((language_id, auto_indent));
2430 auto_indent
2431 }
2432 })
2433 .map(|((ix, (range, _)), new_text)| {
2434 let new_text_length = new_text.len();
2435 let old_start = range.start.to_point(&before_edit);
2436 let new_start = (delta + range.start as isize) as usize;
2437 let range_len = range.end - range.start;
2438 delta += new_text_length as isize - range_len as isize;
2439
2440 // Decide what range of the insertion to auto-indent, and whether
2441 // the first line of the insertion should be considered a newly-inserted line
2442 // or an edit to an existing line.
2443 let mut range_of_insertion_to_indent = 0..new_text_length;
2444 let mut first_line_is_new = true;
2445
2446 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2447 let old_line_end = before_edit.line_len(old_start.row);
2448
2449 if old_start.column > old_line_start {
2450 first_line_is_new = false;
2451 }
2452
2453 if !new_text.contains('\n')
2454 && (old_start.column + (range_len as u32) < old_line_end
2455 || old_line_end == old_line_start)
2456 {
2457 first_line_is_new = false;
2458 }
2459
2460 // When inserting text starting with a newline, avoid auto-indenting the
2461 // previous line.
2462 if new_text.starts_with('\n') {
2463 range_of_insertion_to_indent.start += 1;
2464 first_line_is_new = true;
2465 }
2466
2467 let mut original_indent_column = None;
2468 if let AutoindentMode::Block {
2469 original_indent_columns,
2470 } = &mode
2471 {
2472 original_indent_column = Some(if new_text.starts_with('\n') {
2473 indent_size_for_text(
2474 new_text[range_of_insertion_to_indent.clone()].chars(),
2475 )
2476 .len
2477 } else {
2478 original_indent_columns
2479 .get(ix)
2480 .copied()
2481 .flatten()
2482 .unwrap_or_else(|| {
2483 indent_size_for_text(
2484 new_text[range_of_insertion_to_indent.clone()].chars(),
2485 )
2486 .len
2487 })
2488 });
2489
2490 // Avoid auto-indenting the line after the edit.
2491 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2492 range_of_insertion_to_indent.end -= 1;
2493 }
2494 }
2495
2496 AutoindentRequestEntry {
2497 first_line_is_new,
2498 original_indent_column,
2499 indent_size: before_edit.language_indent_size_at(range.start, cx),
2500 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2501 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2502 }
2503 })
2504 .collect();
2505
2506 if !entries.is_empty() {
2507 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2508 before_edit,
2509 entries,
2510 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2511 ignore_empty_lines: false,
2512 }));
2513 }
2514 }
2515
2516 self.end_transaction(cx);
2517 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2518 Some(edit_id)
2519 }
2520
2521 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2522 self.was_changed();
2523
2524 if self.edits_since::<usize>(old_version).next().is_none() {
2525 return;
2526 }
2527
2528 self.reparse(cx);
2529 cx.emit(BufferEvent::Edited);
2530 if was_dirty != self.is_dirty() {
2531 cx.emit(BufferEvent::DirtyChanged);
2532 }
2533 cx.notify();
2534 }
2535
2536 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2537 where
2538 I: IntoIterator<Item = Range<T>>,
2539 T: ToOffset + Copy,
2540 {
2541 let before_edit = self.snapshot();
2542 let entries = ranges
2543 .into_iter()
2544 .map(|range| AutoindentRequestEntry {
2545 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2546 first_line_is_new: true,
2547 indent_size: before_edit.language_indent_size_at(range.start, cx),
2548 original_indent_column: None,
2549 })
2550 .collect();
2551 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2552 before_edit,
2553 entries,
2554 is_block_mode: false,
2555 ignore_empty_lines: true,
2556 }));
2557 self.request_autoindent(cx);
2558 }
2559
2560 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2561 // You can also request the insertion of empty lines above and below the line starting at the returned point.
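    //
    // A minimal illustrative sketch (hypothetical caller; `Point::new(3, 0)` is an
    // example position):
    //
    //     // Insert a blank, auto-indented line near row 3, padded by blank lines
    //     // above and below when the neighboring lines are non-blank.
    //     let caret = buffer.insert_empty_line(Point::new(3, 0), true, true, cx);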
2562 pub fn insert_empty_line(
2563 &mut self,
2564 position: impl ToPoint,
2565 space_above: bool,
2566 space_below: bool,
2567 cx: &mut Context<Self>,
2568 ) -> Point {
2569 let mut position = position.to_point(self);
2570
2571 self.start_transaction();
2572
2573 self.edit(
2574 [(position..position, "\n")],
2575 Some(AutoindentMode::EachLine),
2576 cx,
2577 );
2578
2579 if position.column > 0 {
2580 position += Point::new(1, 0);
2581 }
2582
2583 if !self.is_line_blank(position.row) {
2584 self.edit(
2585 [(position..position, "\n")],
2586 Some(AutoindentMode::EachLine),
2587 cx,
2588 );
2589 }
2590
2591 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2592 self.edit(
2593 [(position..position, "\n")],
2594 Some(AutoindentMode::EachLine),
2595 cx,
2596 );
2597 position.row += 1;
2598 }
2599
2600 if space_below
2601 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2602 {
2603 self.edit(
2604 [(position..position, "\n")],
2605 Some(AutoindentMode::EachLine),
2606 cx,
2607 );
2608 }
2609
2610 self.end_transaction(cx);
2611
2612 position
2613 }
2614
2615 /// Applies the given remote operations to the buffer.
2616 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2617 self.pending_autoindent.take();
2618 let was_dirty = self.is_dirty();
2619 let old_version = self.version.clone();
2620 let mut deferred_ops = Vec::new();
2621 let buffer_ops = ops
2622 .into_iter()
2623 .filter_map(|op| match op {
2624 Operation::Buffer(op) => Some(op),
2625 _ => {
2626 if self.can_apply_op(&op) {
2627 self.apply_op(op, cx);
2628 } else {
2629 deferred_ops.push(op);
2630 }
2631 None
2632 }
2633 })
2634 .collect::<Vec<_>>();
2635 for operation in buffer_ops.iter() {
2636 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2637 }
2638 self.text
2639 .apply_ops(buffer_ops, Some(cx.background_executor()));
2640 self.deferred_ops.insert(deferred_ops);
2641 self.flush_deferred_ops(cx);
2642 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, as the operations could
        // include a selection update.
2645 cx.notify();
2646 }
2647
2648 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2649 let mut deferred_ops = Vec::new();
2650 for op in self.deferred_ops.drain().iter().cloned() {
2651 if self.can_apply_op(&op) {
2652 self.apply_op(op, cx);
2653 } else {
2654 deferred_ops.push(op);
2655 }
2656 }
2657 self.deferred_ops.insert(deferred_ops);
2658 }
2659
2660 pub fn has_deferred_ops(&self) -> bool {
2661 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2662 }
2663
2664 fn can_apply_op(&self, operation: &Operation) -> bool {
2665 match operation {
2666 Operation::Buffer(_) => {
2667 unreachable!("buffer operations should never be applied at this layer")
2668 }
2669 Operation::UpdateDiagnostics {
2670 diagnostics: diagnostic_set,
2671 ..
2672 } => diagnostic_set.iter().all(|diagnostic| {
2673 self.text.can_resolve(&diagnostic.range.start)
2674 && self.text.can_resolve(&diagnostic.range.end)
2675 }),
2676 Operation::UpdateSelections { selections, .. } => selections
2677 .iter()
2678 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2679 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2680 }
2681 }
2682
2683 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2684 match operation {
2685 Operation::Buffer(_) => {
2686 unreachable!("buffer operations should never be applied at this layer")
2687 }
2688 Operation::UpdateDiagnostics {
2689 server_id,
2690 diagnostics: diagnostic_set,
2691 lamport_timestamp,
2692 } => {
2693 let snapshot = self.snapshot();
2694 self.apply_diagnostic_update(
2695 server_id,
2696 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2697 lamport_timestamp,
2698 cx,
2699 );
2700 }
2701 Operation::UpdateSelections {
2702 selections,
2703 lamport_timestamp,
2704 line_mode,
2705 cursor_shape,
2706 } => {
2707 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2708 && set.lamport_timestamp > lamport_timestamp
2709 {
2710 return;
2711 }
2712
2713 self.remote_selections.insert(
2714 lamport_timestamp.replica_id,
2715 SelectionSet {
2716 selections,
2717 lamport_timestamp,
2718 line_mode,
2719 cursor_shape,
2720 },
2721 );
2722 self.text.lamport_clock.observe(lamport_timestamp);
2723 self.non_text_state_update_count += 1;
2724 }
2725 Operation::UpdateCompletionTriggers {
2726 triggers,
2727 lamport_timestamp,
2728 server_id,
2729 } => {
2730 if triggers.is_empty() {
2731 self.completion_triggers_per_language_server
2732 .remove(&server_id);
2733 self.completion_triggers = self
2734 .completion_triggers_per_language_server
2735 .values()
2736 .flat_map(|triggers| triggers.iter().cloned())
2737 .collect();
2738 } else {
2739 self.completion_triggers_per_language_server
2740 .insert(server_id, triggers.iter().cloned().collect());
2741 self.completion_triggers.extend(triggers);
2742 }
2743 self.text.lamport_clock.observe(lamport_timestamp);
2744 }
2745 Operation::UpdateLineEnding {
2746 line_ending,
2747 lamport_timestamp,
2748 } => {
2749 self.text.set_line_ending(line_ending);
2750 self.text.lamport_clock.observe(lamport_timestamp);
2751 }
2752 }
2753 }
2754
2755 fn apply_diagnostic_update(
2756 &mut self,
2757 server_id: LanguageServerId,
2758 diagnostics: DiagnosticSet,
2759 lamport_timestamp: clock::Lamport,
2760 cx: &mut Context<Self>,
2761 ) {
2762 if lamport_timestamp > self.diagnostics_timestamp {
2763 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2764 if diagnostics.is_empty() {
2765 if let Ok(ix) = ix {
2766 self.diagnostics.remove(ix);
2767 }
2768 } else {
2769 match ix {
2770 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2771 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2772 };
2773 }
2774 self.diagnostics_timestamp = lamport_timestamp;
2775 self.non_text_state_update_count += 1;
2776 self.text.lamport_clock.observe(lamport_timestamp);
2777 cx.notify();
2778 cx.emit(BufferEvent::DiagnosticsUpdated);
2779 }
2780 }
2781
2782 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2783 self.was_changed();
2784 cx.emit(BufferEvent::Operation {
2785 operation,
2786 is_local,
2787 });
2788 }
2789
2790 /// Removes the selections for a given peer.
2791 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2792 self.remote_selections.remove(&replica_id);
2793 cx.notify();
2794 }
2795
2796 /// Undoes the most recent transaction.
2797 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2798 let was_dirty = self.is_dirty();
2799 let old_version = self.version.clone();
2800
2801 if let Some((transaction_id, operation)) = self.text.undo() {
2802 self.send_operation(Operation::Buffer(operation), true, cx);
2803 self.did_edit(&old_version, was_dirty, cx);
2804 Some(transaction_id)
2805 } else {
2806 None
2807 }
2808 }
2809
2810 /// Manually undoes a specific transaction in the buffer's undo history.
2811 pub fn undo_transaction(
2812 &mut self,
2813 transaction_id: TransactionId,
2814 cx: &mut Context<Self>,
2815 ) -> bool {
2816 let was_dirty = self.is_dirty();
2817 let old_version = self.version.clone();
2818 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2819 self.send_operation(Operation::Buffer(operation), true, cx);
2820 self.did_edit(&old_version, was_dirty, cx);
2821 true
2822 } else {
2823 false
2824 }
2825 }
2826
2827 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2828 pub fn undo_to_transaction(
2829 &mut self,
2830 transaction_id: TransactionId,
2831 cx: &mut Context<Self>,
2832 ) -> bool {
2833 let was_dirty = self.is_dirty();
2834 let old_version = self.version.clone();
2835
2836 let operations = self.text.undo_to_transaction(transaction_id);
2837 let undone = !operations.is_empty();
2838 for operation in operations {
2839 self.send_operation(Operation::Buffer(operation), true, cx);
2840 }
2841 if undone {
2842 self.did_edit(&old_version, was_dirty, cx)
2843 }
2844 undone
2845 }
2846
2847 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2848 let was_dirty = self.is_dirty();
2849 let operation = self.text.undo_operations(counts);
2850 let old_version = self.version.clone();
2851 self.send_operation(Operation::Buffer(operation), true, cx);
2852 self.did_edit(&old_version, was_dirty, cx);
2853 }
2854
    /// Redoes the most recent transaction.
2856 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2857 let was_dirty = self.is_dirty();
2858 let old_version = self.version.clone();
2859
2860 if let Some((transaction_id, operation)) = self.text.redo() {
2861 self.send_operation(Operation::Buffer(operation), true, cx);
2862 self.did_edit(&old_version, was_dirty, cx);
2863 Some(transaction_id)
2864 } else {
2865 None
2866 }
2867 }
2868
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2870 pub fn redo_to_transaction(
2871 &mut self,
2872 transaction_id: TransactionId,
2873 cx: &mut Context<Self>,
2874 ) -> bool {
2875 let was_dirty = self.is_dirty();
2876 let old_version = self.version.clone();
2877
2878 let operations = self.text.redo_to_transaction(transaction_id);
2879 let redone = !operations.is_empty();
2880 for operation in operations {
2881 self.send_operation(Operation::Buffer(operation), true, cx);
2882 }
2883 if redone {
2884 self.did_edit(&old_version, was_dirty, cx)
2885 }
2886 redone
2887 }
2888
    /// Overrides the current completion triggers with the user-provided completion triggers.
2890 pub fn set_completion_triggers(
2891 &mut self,
2892 server_id: LanguageServerId,
2893 triggers: BTreeSet<String>,
2894 cx: &mut Context<Self>,
2895 ) {
2896 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2897 if triggers.is_empty() {
2898 self.completion_triggers_per_language_server
2899 .remove(&server_id);
2900 self.completion_triggers = self
2901 .completion_triggers_per_language_server
2902 .values()
2903 .flat_map(|triggers| triggers.iter().cloned())
2904 .collect();
2905 } else {
2906 self.completion_triggers_per_language_server
2907 .insert(server_id, triggers.clone());
2908 self.completion_triggers.extend(triggers.iter().cloned());
2909 }
2910 self.send_operation(
2911 Operation::UpdateCompletionTriggers {
2912 triggers: triggers.into_iter().collect(),
2913 lamport_timestamp: self.completion_triggers_timestamp,
2914 server_id,
2915 },
2916 true,
2917 cx,
2918 );
2919 cx.notify();
2920 }
2921
2922 /// Returns a list of strings which trigger a completion menu for this language.
2923 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2924 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2925 &self.completion_triggers
2926 }
2927
2928 /// Call this directly after performing edits to prevent the preview tab
2929 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2930 /// to return false until there are additional edits.
2931 pub fn refresh_preview(&mut self) {
2932 self.preview_version = self.version.clone();
2933 }
2934
2935 /// Whether we should preserve the preview status of a tab containing this buffer.
2936 pub fn preserve_preview(&self) -> bool {
2937 !self.has_edits_since(&self.preview_version)
2938 }
2939
    /// Updates the buffer's `encoding` field whenever the file's encoding changes.
2941 pub fn update_encoding(&mut self) {
2942 if let Some(file) = self.file() {
2943 if let Some(encoding) = file.encoding() {
2944 self.encoding.set(encoding.get());
2945 } else {
2946 self.encoding.set(encodings::UTF_8);
2947 };
2948 }
2949 }
2950}
2951
2952#[doc(hidden)]
2953#[cfg(any(test, feature = "test-support"))]
2954impl Buffer {
2955 pub fn edit_via_marked_text(
2956 &mut self,
2957 marked_string: &str,
2958 autoindent_mode: Option<AutoindentMode>,
2959 cx: &mut Context<Self>,
2960 ) {
2961 let edits = self.edits_for_marked_text(marked_string);
2962 self.edit(edits, autoindent_mode, cx);
2963 }
2964
2965 pub fn set_group_interval(&mut self, group_interval: Duration) {
2966 self.text.set_group_interval(group_interval);
2967 }
2968
2969 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2970 where
2971 T: rand::Rng,
2972 {
2973 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2974 let mut last_end = None;
2975 for _ in 0..old_range_count {
2976 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2977 break;
2978 }
2979
2980 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2981 let mut range = self.random_byte_range(new_start, rng);
2982 if rng.random_bool(0.2) {
2983 mem::swap(&mut range.start, &mut range.end);
2984 }
2985 last_end = Some(range.end);
2986
2987 let new_text_len = rng.random_range(0..10);
2988 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2989 new_text = new_text.to_uppercase();
2990
2991 edits.push((range, new_text));
2992 }
2993 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2994 self.edit(edits, None, cx);
2995 }
2996
2997 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2998 let was_dirty = self.is_dirty();
2999 let old_version = self.version.clone();
3000
3001 let ops = self.text.randomly_undo_redo(rng);
3002 if !ops.is_empty() {
3003 for op in ops {
3004 self.send_operation(Operation::Buffer(op), true, cx);
3005 self.did_edit(&old_version, was_dirty, cx);
3006 }
3007 }
3008 }
3009}
3010
3011impl EventEmitter<BufferEvent> for Buffer {}
3012
3013impl Deref for Buffer {
3014 type Target = TextBuffer;
3015
3016 fn deref(&self) -> &Self::Target {
3017 &self.text
3018 }
3019}
3020
3021impl BufferSnapshot {
3022 /// Returns [`IndentSize`] for a given line that respects user settings and
3023 /// language preferences.
3024 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3025 indent_size_for_line(self, row)
3026 }
3027
3028 /// Returns [`IndentSize`] for a given position that respects user settings
3029 /// and language preferences.
3030 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3031 let settings = language_settings(
3032 self.language_at(position).map(|l| l.name()),
3033 self.file(),
3034 cx,
3035 );
3036 if settings.hard_tabs {
3037 IndentSize::tab()
3038 } else {
3039 IndentSize::spaces(settings.tab_size.get())
3040 }
3041 }
3042
3043 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3044 /// is passed in as `single_indent_size`.
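    ///
    /// A minimal illustrative sketch (not a doctest; `snapshot` is a hypothetical
    /// `BufferSnapshot` binding):
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```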
3045 pub fn suggested_indents(
3046 &self,
3047 rows: impl Iterator<Item = u32>,
3048 single_indent_size: IndentSize,
3049 ) -> BTreeMap<u32, IndentSize> {
3050 let mut result = BTreeMap::new();
3051
3052 for row_range in contiguous_ranges(rows, 10) {
3053 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3054 Some(suggestions) => suggestions,
3055 _ => break,
3056 };
3057
3058 for (row, suggestion) in row_range.zip(suggestions) {
3059 let indent_size = if let Some(suggestion) = suggestion {
3060 result
3061 .get(&suggestion.basis_row)
3062 .copied()
3063 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3064 .with_delta(suggestion.delta, single_indent_size)
3065 } else {
3066 self.indent_size_for_line(row)
3067 };
3068
3069 result.insert(row, indent_size);
3070 }
3071 }
3072
3073 result
3074 }
3075
3076 fn suggest_autoindents(
3077 &self,
3078 row_range: Range<u32>,
3079 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3080 let config = &self.language.as_ref()?.config;
3081 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3082
3083 #[derive(Debug, Clone)]
3084 struct StartPosition {
3085 start: Point,
3086 suffix: SharedString,
3087 }
3088
3089 // Find the suggested indentation ranges based on the syntax tree.
3090 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3091 let end = Point::new(row_range.end, 0);
3092 let range = (start..end).to_offset(&self.text);
3093 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3094 Some(&grammar.indents_config.as_ref()?.query)
3095 });
3096 let indent_configs = matches
3097 .grammars()
3098 .iter()
3099 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3100 .collect::<Vec<_>>();
3101
3102 let mut indent_ranges = Vec::<Range<Point>>::new();
3103 let mut start_positions = Vec::<StartPosition>::new();
3104 let mut outdent_positions = Vec::<Point>::new();
3105 while let Some(mat) = matches.peek() {
3106 let mut start: Option<Point> = None;
3107 let mut end: Option<Point> = None;
3108
3109 let config = indent_configs[mat.grammar_index];
3110 for capture in mat.captures {
3111 if capture.index == config.indent_capture_ix {
3112 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3113 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3114 } else if Some(capture.index) == config.start_capture_ix {
3115 start = Some(Point::from_ts_point(capture.node.end_position()));
3116 } else if Some(capture.index) == config.end_capture_ix {
3117 end = Some(Point::from_ts_point(capture.node.start_position()));
3118 } else if Some(capture.index) == config.outdent_capture_ix {
3119 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3120 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3121 start_positions.push(StartPosition {
3122 start: Point::from_ts_point(capture.node.start_position()),
3123 suffix: suffix.clone(),
3124 });
3125 }
3126 }
3127
3128 matches.advance();
3129 if let Some((start, end)) = start.zip(end) {
3130 if start.row == end.row {
3131 continue;
3132 }
3133 let range = start..end;
3134 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3135 Err(ix) => indent_ranges.insert(ix, range),
3136 Ok(ix) => {
3137 let prev_range = &mut indent_ranges[ix];
3138 prev_range.end = prev_range.end.max(range.end);
3139 }
3140 }
3141 }
3142 }
3143
3144 let mut error_ranges = Vec::<Range<Point>>::new();
3145 let mut matches = self
3146 .syntax
3147 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3148 while let Some(mat) = matches.peek() {
3149 let node = mat.captures[0].node;
3150 let start = Point::from_ts_point(node.start_position());
3151 let end = Point::from_ts_point(node.end_position());
3152 let range = start..end;
3153 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3154 Ok(ix) | Err(ix) => ix,
3155 };
3156 let mut end_ix = ix;
3157 while let Some(existing_range) = error_ranges.get(end_ix) {
3158 if existing_range.end < end {
3159 end_ix += 1;
3160 } else {
3161 break;
3162 }
3163 }
3164 error_ranges.splice(ix..end_ix, [range]);
3165 matches.advance();
3166 }
3167
3168 outdent_positions.sort();
3169 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position, and
            // set its end to the outdent position.
3172 if let Some(range_to_truncate) = indent_ranges
3173 .iter_mut()
3174 .filter(|indent_range| indent_range.contains(&outdent_position))
3175 .next_back()
3176 {
3177 range_to_truncate.end = outdent_position;
3178 }
3179 }
3180
3181 start_positions.sort_by_key(|b| b.start);
3182
        // Find the suggested indentation increases and decreases based on regexes.
3184 let mut regex_outdent_map = HashMap::default();
3185 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3186 let mut start_positions_iter = start_positions.iter().peekable();
3187
3188 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3189 self.for_each_line(
3190 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3191 ..Point::new(row_range.end, 0),
3192 |row, line| {
3193 if config
3194 .decrease_indent_pattern
3195 .as_ref()
3196 .is_some_and(|regex| regex.is_match(line))
3197 {
3198 indent_change_rows.push((row, Ordering::Less));
3199 }
3200 if config
3201 .increase_indent_pattern
3202 .as_ref()
3203 .is_some_and(|regex| regex.is_match(line))
3204 {
3205 indent_change_rows.push((row + 1, Ordering::Greater));
3206 }
3207 while let Some(pos) = start_positions_iter.peek() {
3208 if pos.start.row < row {
3209 let pos = start_positions_iter.next().unwrap();
3210 last_seen_suffix
3211 .entry(pos.suffix.to_string())
3212 .or_default()
3213 .push(pos.start);
3214 } else {
3215 break;
3216 }
3217 }
3218 for rule in &config.decrease_indent_patterns {
3219 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3220 let row_start_column = self.indent_size_for_line(row).len;
3221 let basis_row = rule
3222 .valid_after
3223 .iter()
3224 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3225 .flatten()
3226 .filter(|start_point| start_point.column <= row_start_column)
3227 .max_by_key(|start_point| start_point.row);
3228 if let Some(outdent_to_row) = basis_row {
3229 regex_outdent_map.insert(row, outdent_to_row.row);
3230 }
3231 break;
3232 }
3233 }
3234 },
3235 );
3236
3237 let mut indent_changes = indent_change_rows.into_iter().peekable();
3238 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3239 prev_non_blank_row.unwrap_or(0)
3240 } else {
3241 row_range.start.saturating_sub(1)
3242 };
3243
3244 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3245 Some(row_range.map(move |row| {
3246 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3247
3248 let mut indent_from_prev_row = false;
3249 let mut outdent_from_prev_row = false;
3250 let mut outdent_to_row = u32::MAX;
3251 let mut from_regex = false;
3252
3253 while let Some((indent_row, delta)) = indent_changes.peek() {
3254 match indent_row.cmp(&row) {
3255 Ordering::Equal => match delta {
3256 Ordering::Less => {
3257 from_regex = true;
3258 outdent_from_prev_row = true
3259 }
3260 Ordering::Greater => {
3261 indent_from_prev_row = true;
3262 from_regex = true
3263 }
3264 _ => {}
3265 },
3266
3267 Ordering::Greater => break,
3268 Ordering::Less => {}
3269 }
3270
3271 indent_changes.next();
3272 }
3273
3274 for range in &indent_ranges {
3275 if range.start.row >= row {
3276 break;
3277 }
3278 if range.start.row == prev_row && range.end > row_start {
3279 indent_from_prev_row = true;
3280 }
3281 if range.end > prev_row_start && range.end <= row_start {
3282 outdent_to_row = outdent_to_row.min(range.start.row);
3283 }
3284 }
3285
3286 if let Some(basis_row) = regex_outdent_map.get(&row) {
3287 indent_from_prev_row = false;
3288 outdent_to_row = *basis_row;
3289 from_regex = true;
3290 }
3291
3292 let within_error = error_ranges
3293 .iter()
3294 .any(|e| e.start.row < row && e.end > row_start);
3295
3296 let suggestion = if outdent_to_row == prev_row
3297 || (outdent_from_prev_row && indent_from_prev_row)
3298 {
3299 Some(IndentSuggestion {
3300 basis_row: prev_row,
3301 delta: Ordering::Equal,
3302 within_error: within_error && !from_regex,
3303 })
3304 } else if indent_from_prev_row {
3305 Some(IndentSuggestion {
3306 basis_row: prev_row,
3307 delta: Ordering::Greater,
3308 within_error: within_error && !from_regex,
3309 })
3310 } else if outdent_to_row < prev_row {
3311 Some(IndentSuggestion {
3312 basis_row: outdent_to_row,
3313 delta: Ordering::Equal,
3314 within_error: within_error && !from_regex,
3315 })
3316 } else if outdent_from_prev_row {
3317 Some(IndentSuggestion {
3318 basis_row: prev_row,
3319 delta: Ordering::Less,
3320 within_error: within_error && !from_regex,
3321 })
3322 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3323 {
3324 Some(IndentSuggestion {
3325 basis_row: prev_row,
3326 delta: Ordering::Equal,
3327 within_error: within_error && !from_regex,
3328 })
3329 } else {
3330 None
3331 };
3332
3333 prev_row = row;
3334 prev_row_start = row_start;
3335 suggestion
3336 }))
3337 }
3338
3339 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3340 while row > 0 {
3341 row -= 1;
3342 if !self.is_line_blank(row) {
3343 return Some(row);
3344 }
3345 }
3346 None
3347 }
3348
3349 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3350 let captures = self.syntax.captures(range, &self.text, |grammar| {
3351 grammar
3352 .highlights_config
3353 .as_ref()
3354 .map(|config| &config.query)
3355 });
3356 let highlight_maps = captures
3357 .grammars()
3358 .iter()
3359 .map(|grammar| grammar.highlight_map())
3360 .collect();
3361 (captures, highlight_maps)
3362 }
3363
3364 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3365 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3366 /// returned in chunks where each chunk has a single syntax highlighting style and
3367 /// diagnostic status.
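    ///
    /// A minimal illustrative sketch (not a doctest; assumes the `text` field of this
    /// crate's chunk type, and `snapshot` as a hypothetical `BufferSnapshot` binding):
    /// ```ignore
    /// // Reassemble the plain text of the whole buffer from its chunks.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```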
3368 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3369 let range = range.start.to_offset(self)..range.end.to_offset(self);
3370
3371 let mut syntax = None;
3372 if language_aware {
3373 syntax = Some(self.get_highlights(range.clone()));
3374 }
3375 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3376 let diagnostics = language_aware;
3377 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3378 }
3379
3380 pub fn highlighted_text_for_range<T: ToOffset>(
3381 &self,
3382 range: Range<T>,
3383 override_style: Option<HighlightStyle>,
3384 syntax_theme: &SyntaxTheme,
3385 ) -> HighlightedText {
3386 HighlightedText::from_buffer_range(
3387 range,
3388 &self.text,
3389 &self.syntax,
3390 override_style,
3391 syntax_theme,
3392 )
3393 }
3394
3395 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3397 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3398 let mut line = String::new();
3399 let mut row = range.start.row;
3400 for chunk in self
3401 .as_rope()
3402 .chunks_in_range(range.to_offset(self))
3403 .chain(["\n"])
3404 {
3405 for (newline_ix, text) in chunk.split('\n').enumerate() {
3406 if newline_ix > 0 {
3407 callback(row, &line);
3408 row += 1;
3409 line.clear();
3410 }
3411 line.push_str(text);
3412 }
3413 }
3414 }
3415
3416 /// Iterates over every [`SyntaxLayer`] in the buffer.
3417 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3418 self.syntax_layers_for_range(0..self.len(), true)
3419 }
3420
3421 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3422 let offset = position.to_offset(self);
3423 self.syntax_layers_for_range(offset..offset, false)
3424 .filter(|l| l.node().end_byte() > offset)
3425 .last()
3426 }
3427
3428 pub fn syntax_layers_for_range<D: ToOffset>(
3429 &self,
3430 range: Range<D>,
3431 include_hidden: bool,
3432 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3433 self.syntax
3434 .layers_for_range(range, &self.text, include_hidden)
3435 }
3436
3437 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3438 &self,
3439 range: Range<D>,
3440 ) -> Option<SyntaxLayer<'_>> {
3441 let range = range.to_offset(self);
3442 self.syntax
3443 .layers_for_range(range, &self.text, false)
3444 .max_by(|a, b| {
3445 if a.depth != b.depth {
3446 a.depth.cmp(&b.depth)
3447 } else if a.offset.0 != b.offset.0 {
3448 a.offset.0.cmp(&b.offset.0)
3449 } else {
3450 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3451 }
3452 })
3453 }
3454
3455 /// Returns the main [`Language`].
3456 pub fn language(&self) -> Option<&Arc<Language>> {
3457 self.language.as_ref()
3458 }
3459
3460 /// Returns the [`Language`] at the given location.
3461 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3462 self.syntax_layer_at(position)
3463 .map(|info| info.language)
3464 .or(self.language.as_ref())
3465 }
3466
3467 /// Returns the settings for the language at the given location.
3468 pub fn settings_at<'a, D: ToOffset>(
3469 &'a self,
3470 position: D,
3471 cx: &'a App,
3472 ) -> Cow<'a, LanguageSettings> {
3473 language_settings(
3474 self.language_at(position).map(|l| l.name()),
3475 self.file.as_ref(),
3476 cx,
3477 )
3478 }
3479
3480 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3481 CharClassifier::new(self.language_scope_at(point))
3482 }
3483
3484 /// Returns the [`LanguageScope`] at the given location.
3485 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3486 let offset = position.to_offset(self);
3487 let mut scope = None;
3488 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3489
3490 // Use the layer that has the smallest node intersecting the given point.
3491 for layer in self
3492 .syntax
3493 .layers_for_range(offset..offset, &self.text, false)
3494 {
3495 let mut cursor = layer.node().walk();
3496
3497 let mut range = None;
3498 loop {
3499 let child_range = cursor.node().byte_range();
3500 if !child_range.contains(&offset) {
3501 break;
3502 }
3503
3504 range = Some(child_range);
3505 if cursor.goto_first_child_for_byte(offset).is_none() {
3506 break;
3507 }
3508 }
3509
3510 if let Some(range) = range
3511 && smallest_range_and_depth.as_ref().is_none_or(
3512 |(smallest_range, smallest_range_depth)| {
3513 if layer.depth > *smallest_range_depth {
3514 true
3515 } else if layer.depth == *smallest_range_depth {
3516 range.len() < smallest_range.len()
3517 } else {
3518 false
3519 }
3520 },
3521 )
3522 {
3523 smallest_range_and_depth = Some((range, layer.depth));
3524 scope = Some(LanguageScope {
3525 language: layer.language.clone(),
3526 override_id: layer.override_id(offset, &self.text),
3527 });
3528 }
3529 }
3530
3531 scope.or_else(|| {
3532 self.language.clone().map(|language| LanguageScope {
3533 language,
3534 override_id: None,
3535 })
3536 })
3537 }
3538
    /// Returns a tuple of the range and character kind of the word
    /// surrounding the given position. The search is limited to 128 characters
    /// in each direction.
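    ///
    /// A minimal usage sketch (not a compiled doctest; assumes `snapshot` is a
    /// `BufferSnapshot` whose text is `"let foo = 1;"`):
    ///
    /// ```ignore
    /// // With the cursor inside "foo", the word's range and character kind are returned.
    /// let (range, kind) = snapshot.surrounding_word(5, None);
    /// assert_eq!(&snapshot.text()[range], "foo");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```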
3541 pub fn surrounding_word<T: ToOffset>(
3542 &self,
3543 start: T,
3544 scope_context: Option<CharScopeContext>,
3545 ) -> (Range<usize>, Option<CharKind>) {
3546 let mut start = start.to_offset(self);
3547 let mut end = start;
3548 let mut next_chars = self.chars_at(start).take(128).peekable();
3549 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3550
3551 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3552 let word_kind = cmp::max(
3553 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3554 next_chars.peek().copied().map(|c| classifier.kind(c)),
3555 );
3556
3557 for ch in prev_chars {
3558 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3559 start -= ch.len_utf8();
3560 } else {
3561 break;
3562 }
3563 }
3564
3565 for ch in next_chars {
3566 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3567 end += ch.len_utf8();
3568 } else {
3569 break;
3570 }
3571 }
3572
3573 (start..end, word_kind)
3574 }
3575
3576 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3577 /// range. When `require_larger` is true, the node found must be larger than the query range.
3578 ///
3579 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3580 /// be moved to the root of the tree.
3581 fn goto_node_enclosing_range(
3582 cursor: &mut tree_sitter::TreeCursor,
3583 query_range: &Range<usize>,
3584 require_larger: bool,
3585 ) -> bool {
3586 let mut ascending = false;
3587 loop {
3588 let mut range = cursor.node().byte_range();
3589 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3592 if range.start > query_range.start {
3593 cursor.goto_previous_sibling();
3594 range = cursor.node().byte_range();
3595 }
3596 } else {
3597 // When the query range is non-empty and the current node ends exactly at the start,
3598 // move to the next sibling to find a node that extends beyond the start.
3599 if range.end == query_range.start {
3600 cursor.goto_next_sibling();
3601 range = cursor.node().byte_range();
3602 }
3603 }
3604
3605 let encloses = range.contains_inclusive(query_range)
3606 && (!require_larger || range.len() > query_range.len());
3607 if !encloses {
3608 ascending = true;
3609 if !cursor.goto_parent() {
3610 return false;
3611 }
3612 continue;
3613 } else if ascending {
3614 return true;
3615 }
3616
3617 // Descend into the current node.
3618 if cursor
3619 .goto_first_child_for_byte(query_range.start)
3620 .is_none()
3621 {
3622 return true;
3623 }
3624 }
3625 }
3626
3627 pub fn syntax_ancestor<'a, T: ToOffset>(
3628 &'a self,
3629 range: Range<T>,
3630 ) -> Option<tree_sitter::Node<'a>> {
3631 let range = range.start.to_offset(self)..range.end.to_offset(self);
3632 let mut result: Option<tree_sitter::Node<'a>> = None;
3633 for layer in self
3634 .syntax
3635 .layers_for_range(range.clone(), &self.text, true)
3636 {
3637 let mut cursor = layer.node().walk();
3638
3639 // Find the node that both contains the range and is larger than it.
3640 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3641 continue;
3642 }
3643
3644 let left_node = cursor.node();
3645 let mut layer_result = left_node;
3646
3647 // For an empty range, try to find another node immediately to the right of the range.
3648 if left_node.end_byte() == range.start {
3649 let mut right_node = None;
3650 while !cursor.goto_next_sibling() {
3651 if !cursor.goto_parent() {
3652 break;
3653 }
3654 }
3655
3656 while cursor.node().start_byte() == range.start {
3657 right_node = Some(cursor.node());
3658 if !cursor.goto_first_child() {
3659 break;
3660 }
3661 }
3662
3663 // If there is a candidate node on both sides of the (empty) range, then
3664 // decide between the two by favoring a named node over an anonymous token.
3665 // If both nodes are the same in that regard, favor the right one.
3666 if let Some(right_node) = right_node
3667 && (right_node.is_named() || !left_node.is_named())
3668 {
3669 layer_result = right_node;
3670 }
3671 }
3672
3673 if let Some(previous_result) = &result
3674 && previous_result.byte_range().len() < layer_result.byte_range().len()
3675 {
3676 continue;
3677 }
3678 result = Some(layer_result);
3679 }
3680
3681 result
3682 }
3683
3684 /// Find the previous sibling syntax node at the given range.
3685 ///
3686 /// This function locates the syntax node that precedes the node containing
3687 /// the given range. It searches hierarchically by:
3688 /// 1. Finding the node that contains the given range
3689 /// 2. Looking for the previous sibling at the same tree level
3690 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3691 ///
3692 /// Returns `None` if there is no previous sibling at any ancestor level.
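    ///
    /// A rough sketch of the intent (not a compiled doctest; assumes `snapshot` holds
    /// the Rust source `"fn a() {}\nfn b() {}"` parsed with a tree-sitter grammar):
    ///
    /// ```ignore
    /// // With a range inside the second function, the previous sibling at that
    /// // level of the tree is the first function item.
    /// let prev = snapshot.syntax_prev_sibling(12..13).unwrap();
    /// assert_eq!(prev.kind(), "function_item");
    /// ```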
3693 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3694 &'a self,
3695 range: Range<T>,
3696 ) -> Option<tree_sitter::Node<'a>> {
3697 let range = range.start.to_offset(self)..range.end.to_offset(self);
3698 let mut result: Option<tree_sitter::Node<'a>> = None;
3699
3700 for layer in self
3701 .syntax
3702 .layers_for_range(range.clone(), &self.text, true)
3703 {
3704 let mut cursor = layer.node().walk();
3705
3706 // Find the node that contains the range
3707 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3708 continue;
3709 }
3710
3711 // Look for the previous sibling, moving up ancestor levels if needed
3712 loop {
3713 if cursor.goto_previous_sibling() {
3714 let layer_result = cursor.node();
3715
3716 if let Some(previous_result) = &result {
3717 if previous_result.byte_range().end < layer_result.byte_range().end {
3718 continue;
3719 }
3720 }
3721 result = Some(layer_result);
3722 break;
3723 }
3724
3725 // No sibling found at this level, try moving up to parent
3726 if !cursor.goto_parent() {
3727 break;
3728 }
3729 }
3730 }
3731
3732 result
3733 }
3734
3735 /// Find the next sibling syntax node at the given range.
3736 ///
3737 /// This function locates the syntax node that follows the node containing
3738 /// the given range. It searches hierarchically by:
3739 /// 1. Finding the node that contains the given range
3740 /// 2. Looking for the next sibling at the same tree level
3741 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3742 ///
3743 /// Returns `None` if there is no next sibling at any ancestor level.
3744 pub fn syntax_next_sibling<'a, T: ToOffset>(
3745 &'a self,
3746 range: Range<T>,
3747 ) -> Option<tree_sitter::Node<'a>> {
3748 let range = range.start.to_offset(self)..range.end.to_offset(self);
3749 let mut result: Option<tree_sitter::Node<'a>> = None;
3750
3751 for layer in self
3752 .syntax
3753 .layers_for_range(range.clone(), &self.text, true)
3754 {
3755 let mut cursor = layer.node().walk();
3756
3757 // Find the node that contains the range
3758 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3759 continue;
3760 }
3761
3762 // Look for the next sibling, moving up ancestor levels if needed
3763 loop {
3764 if cursor.goto_next_sibling() {
3765 let layer_result = cursor.node();
3766
3767 if let Some(previous_result) = &result {
3768 if previous_result.byte_range().start > layer_result.byte_range().start {
3769 continue;
3770 }
3771 }
3772 result = Some(layer_result);
3773 break;
3774 }
3775
3776 // No sibling found at this level, try moving up to parent
3777 if !cursor.goto_parent() {
3778 break;
3779 }
3780 }
3781 }
3782
3783 result
3784 }
3785
3786 /// Returns the root syntax node within the given row
3787 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3788 let start_offset = position.to_offset(self);
3789
3790 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3791
3792 let layer = self
3793 .syntax
3794 .layers_for_range(start_offset..start_offset, &self.text, true)
3795 .next()?;
3796
3797 let mut cursor = layer.node().walk();
3798
        // Descend to the first leaf that touches the given position.
3800 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3801 if cursor.node().end_byte() == start_offset {
3802 cursor.goto_next_sibling();
3803 }
3804 }
3805
3806 // Ascend to the root node within the same row.
3807 while cursor.goto_parent() {
3808 if cursor.node().start_position().row != row {
3809 break;
3810 }
3811 }
3812
3813 Some(cursor.node())
3814 }
3815
3816 /// Returns the outline for the buffer.
3817 ///
3818 /// This method allows passing an optional [`SyntaxTheme`] to
3819 /// syntax-highlight the returned symbols.
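    ///
    /// A minimal usage sketch (not a compiled doctest; assumes the buffer's language
    /// has an outline query configured):
    ///
    /// ```ignore
    /// // Collect the display text of every outline item, e.g. function and type names.
    /// let outline = snapshot.outline(None);
    /// let names: Vec<&str> = outline.items.iter().map(|item| item.text.as_str()).collect();
    /// ```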
3820 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3821 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3822 }
3823
3824 /// Returns all the symbols that contain the given position.
3825 ///
3826 /// This method allows passing an optional [`SyntaxTheme`] to
3827 /// syntax-highlight the returned symbols.
3828 pub fn symbols_containing<T: ToOffset>(
3829 &self,
3830 position: T,
3831 theme: Option<&SyntaxTheme>,
3832 ) -> Vec<OutlineItem<Anchor>> {
3833 let position = position.to_offset(self);
3834 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3835 let end = self.clip_offset(position + 1, Bias::Right);
3836 let mut items = self.outline_items_containing(start..end, false, theme);
3837 let mut prev_depth = None;
3838 items.retain(|item| {
3839 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3840 prev_depth = Some(item.depth);
3841 result
3842 });
3843 items
3844 }
3845
3846 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3847 let range = range.to_offset(self);
3848 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3849 grammar.outline_config.as_ref().map(|c| &c.query)
3850 });
3851 let configs = matches
3852 .grammars()
3853 .iter()
3854 .map(|g| g.outline_config.as_ref().unwrap())
3855 .collect::<Vec<_>>();
3856
3857 while let Some(mat) = matches.peek() {
3858 let config = &configs[mat.grammar_index];
3859 let containing_item_node = maybe!({
3860 let item_node = mat.captures.iter().find_map(|cap| {
3861 if cap.index == config.item_capture_ix {
3862 Some(cap.node)
3863 } else {
3864 None
3865 }
3866 })?;
3867
3868 let item_byte_range = item_node.byte_range();
3869 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3870 None
3871 } else {
3872 Some(item_node)
3873 }
3874 });
3875
3876 if let Some(item_node) = containing_item_node {
3877 return Some(
3878 Point::from_ts_point(item_node.start_position())
3879 ..Point::from_ts_point(item_node.end_position()),
3880 );
3881 }
3882
3883 matches.advance();
3884 }
3885 None
3886 }
3887
3888 pub fn outline_items_containing<T: ToOffset>(
3889 &self,
3890 range: Range<T>,
3891 include_extra_context: bool,
3892 theme: Option<&SyntaxTheme>,
3893 ) -> Vec<OutlineItem<Anchor>> {
3894 self.outline_items_containing_internal(
3895 range,
3896 include_extra_context,
3897 theme,
3898 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3899 )
3900 }
3901
3902 pub fn outline_items_as_points_containing<T: ToOffset>(
3903 &self,
3904 range: Range<T>,
3905 include_extra_context: bool,
3906 theme: Option<&SyntaxTheme>,
3907 ) -> Vec<OutlineItem<Point>> {
3908 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3909 range
3910 })
3911 }
3912
3913 fn outline_items_containing_internal<T: ToOffset, U>(
3914 &self,
3915 range: Range<T>,
3916 include_extra_context: bool,
3917 theme: Option<&SyntaxTheme>,
3918 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3919 ) -> Vec<OutlineItem<U>> {
3920 let range = range.to_offset(self);
3921 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3922 grammar.outline_config.as_ref().map(|c| &c.query)
3923 });
3924
3925 let mut items = Vec::new();
3926 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3927 while let Some(mat) = matches.peek() {
3928 let config = matches.grammars()[mat.grammar_index]
3929 .outline_config
3930 .as_ref()
3931 .unwrap();
3932 if let Some(item) =
3933 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3934 {
3935 items.push(item);
3936 } else if let Some(capture) = mat
3937 .captures
3938 .iter()
3939 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3940 {
3941 let capture_range = capture.node.start_position()..capture.node.end_position();
3942 let mut capture_row_range =
3943 capture_range.start.row as u32..capture_range.end.row as u32;
3944 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3945 {
3946 capture_row_range.end -= 1;
3947 }
3948 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3949 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3950 last_row_range.end = capture_row_range.end;
3951 } else {
3952 annotation_row_ranges.push(capture_row_range);
3953 }
3954 } else {
3955 annotation_row_ranges.push(capture_row_range);
3956 }
3957 }
3958 matches.advance();
3959 }
3960
3961 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3962
3963 // Assign depths based on containment relationships and convert to anchors.
3964 let mut item_ends_stack = Vec::<Point>::new();
3965 let mut anchor_items = Vec::new();
3966 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3967 for item in items {
3968 while let Some(last_end) = item_ends_stack.last().copied() {
3969 if last_end < item.range.end {
3970 item_ends_stack.pop();
3971 } else {
3972 break;
3973 }
3974 }
3975
3976 let mut annotation_row_range = None;
3977 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3978 let row_preceding_item = item.range.start.row.saturating_sub(1);
3979 if next_annotation_row_range.end < row_preceding_item {
3980 annotation_row_ranges.next();
3981 } else {
3982 if next_annotation_row_range.end == row_preceding_item {
3983 annotation_row_range = Some(next_annotation_row_range.clone());
3984 annotation_row_ranges.next();
3985 }
3986 break;
3987 }
3988 }
3989
3990 anchor_items.push(OutlineItem {
3991 depth: item_ends_stack.len(),
3992 range: range_callback(self, item.range.clone()),
3993 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3994 text: item.text,
3995 highlight_ranges: item.highlight_ranges,
3996 name_ranges: item.name_ranges,
3997 body_range: item.body_range.map(|r| range_callback(self, r)),
3998 annotation_range: annotation_row_range.map(|annotation_range| {
3999 let point_range = Point::new(annotation_range.start, 0)
4000 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4001 range_callback(self, point_range)
4002 }),
4003 });
4004 item_ends_stack.push(item.range.end);
4005 }
4006
4007 anchor_items
4008 }
4009
4010 fn next_outline_item(
4011 &self,
4012 config: &OutlineConfig,
4013 mat: &SyntaxMapMatch,
4014 range: &Range<usize>,
4015 include_extra_context: bool,
4016 theme: Option<&SyntaxTheme>,
4017 ) -> Option<OutlineItem<Point>> {
4018 let item_node = mat.captures.iter().find_map(|cap| {
4019 if cap.index == config.item_capture_ix {
4020 Some(cap.node)
4021 } else {
4022 None
4023 }
4024 })?;
4025
4026 let item_byte_range = item_node.byte_range();
4027 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4028 return None;
4029 }
4030 let item_point_range = Point::from_ts_point(item_node.start_position())
4031 ..Point::from_ts_point(item_node.end_position());
4032
4033 let mut open_point = None;
4034 let mut close_point = None;
4035
4036 let mut buffer_ranges = Vec::new();
4037 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4038 let mut range = node.start_byte()..node.end_byte();
4039 let start = node.start_position();
4040 if node.end_position().row > start.row {
4041 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4042 }
4043
4044 if !range.is_empty() {
4045 buffer_ranges.push((range, node_is_name));
4046 }
4047 };
4048
4049 for capture in mat.captures {
4050 if capture.index == config.name_capture_ix {
4051 add_to_buffer_ranges(capture.node, true);
4052 } else if Some(capture.index) == config.context_capture_ix
4053 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4054 {
4055 add_to_buffer_ranges(capture.node, false);
4056 } else {
4057 if Some(capture.index) == config.open_capture_ix {
4058 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4059 } else if Some(capture.index) == config.close_capture_ix {
4060 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4061 }
4062 }
4063 }
4064
4065 if buffer_ranges.is_empty() {
4066 return None;
4067 }
4068 let source_range_for_text =
4069 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4070
4071 let mut text = String::new();
4072 let mut highlight_ranges = Vec::new();
4073 let mut name_ranges = Vec::new();
4074 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4075 let mut last_buffer_range_end = 0;
4076 for (buffer_range, is_name) in buffer_ranges {
4077 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4078 if space_added {
4079 text.push(' ');
4080 }
4081 let before_append_len = text.len();
4082 let mut offset = buffer_range.start;
4083 chunks.seek(buffer_range.clone());
4084 for mut chunk in chunks.by_ref() {
4085 if chunk.text.len() > buffer_range.end - offset {
4086 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4087 offset = buffer_range.end;
4088 } else {
4089 offset += chunk.text.len();
4090 }
4091 let style = chunk
4092 .syntax_highlight_id
4093 .zip(theme)
4094 .and_then(|(highlight, theme)| highlight.style(theme));
4095 if let Some(style) = style {
4096 let start = text.len();
4097 let end = start + chunk.text.len();
4098 highlight_ranges.push((start..end, style));
4099 }
4100 text.push_str(chunk.text);
4101 if offset >= buffer_range.end {
4102 break;
4103 }
4104 }
4105 if is_name {
4106 let after_append_len = text.len();
4107 let start = if space_added && !name_ranges.is_empty() {
4108 before_append_len - 1
4109 } else {
4110 before_append_len
4111 };
4112 name_ranges.push(start..after_append_len);
4113 }
4114 last_buffer_range_end = buffer_range.end;
4115 }
4116
4117 Some(OutlineItem {
4118 depth: 0, // We'll calculate the depth later
4119 range: item_point_range,
4120 source_range_for_text: source_range_for_text.to_point(self),
4121 text,
4122 highlight_ranges,
4123 name_ranges,
4124 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4125 annotation_range: None,
4126 })
4127 }
4128
4129 pub fn function_body_fold_ranges<T: ToOffset>(
4130 &self,
4131 within: Range<T>,
4132 ) -> impl Iterator<Item = Range<usize>> + '_ {
4133 self.text_object_ranges(within, TreeSitterOptions::default())
4134 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4135 }
4136
    /// For each grammar in the buffer, runs the [`tree_sitter::Query`] returned
    /// by the provided callback against the given range.
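    ///
    /// For example, a sketch of collecting bracket matches (not a compiled doctest;
    /// mirrors how the query accessors are used elsewhere in this file):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.brackets_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here...
    ///     matches.advance();
    /// }
    /// ```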
4139 pub fn matches(
4140 &self,
4141 range: Range<usize>,
4142 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4143 ) -> SyntaxMapMatches<'_> {
4144 self.syntax.matches(range, self, query)
4145 }
4146
4147 pub fn all_bracket_ranges(
4148 &self,
4149 range: Range<usize>,
4150 ) -> impl Iterator<Item = BracketMatch> + '_ {
4151 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4152 grammar.brackets_config.as_ref().map(|c| &c.query)
4153 });
4154 let configs = matches
4155 .grammars()
4156 .iter()
4157 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4158 .collect::<Vec<_>>();
4159
4160 iter::from_fn(move || {
4161 while let Some(mat) = matches.peek() {
4162 let mut open = None;
4163 let mut close = None;
4164 let config = &configs[mat.grammar_index];
4165 let pattern = &config.patterns[mat.pattern_index];
4166 for capture in mat.captures {
4167 if capture.index == config.open_capture_ix {
4168 open = Some(capture.node.byte_range());
4169 } else if capture.index == config.close_capture_ix {
4170 close = Some(capture.node.byte_range());
4171 }
4172 }
4173
4174 matches.advance();
4175
4176 let Some((open_range, close_range)) = open.zip(close) else {
4177 continue;
4178 };
4179
4180 let bracket_range = open_range.start..=close_range.end;
4181 if !bracket_range.overlaps(&range) {
4182 continue;
4183 }
4184
4185 return Some(BracketMatch {
4186 open_range,
4187 close_range,
4188 newline_only: pattern.newline_only,
4189 });
4190 }
4191 None
4192 })
4193 }
4194
4195 /// Returns bracket range pairs overlapping or adjacent to `range`
4196 pub fn bracket_ranges<T: ToOffset>(
4197 &self,
4198 range: Range<T>,
4199 ) -> impl Iterator<Item = BracketMatch> + '_ {
4200 // Find bracket pairs that *inclusively* contain the given range.
4201 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4202 self.all_bracket_ranges(range)
4203 .filter(|pair| !pair.newline_only)
4204 }
4205
4206 pub fn debug_variables_query<T: ToOffset>(
4207 &self,
4208 range: Range<T>,
4209 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4210 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4211
4212 let mut matches = self.syntax.matches_with_options(
4213 range.clone(),
4214 &self.text,
4215 TreeSitterOptions::default(),
4216 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4217 );
4218
4219 let configs = matches
4220 .grammars()
4221 .iter()
4222 .map(|grammar| grammar.debug_variables_config.as_ref())
4223 .collect::<Vec<_>>();
4224
4225 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4226
4227 iter::from_fn(move || {
4228 loop {
4229 while let Some(capture) = captures.pop() {
4230 if capture.0.overlaps(&range) {
4231 return Some(capture);
4232 }
4233 }
4234
4235 let mat = matches.peek()?;
4236
4237 let Some(config) = configs[mat.grammar_index].as_ref() else {
4238 matches.advance();
4239 continue;
4240 };
4241
4242 for capture in mat.captures {
4243 let Some(ix) = config
4244 .objects_by_capture_ix
4245 .binary_search_by_key(&capture.index, |e| e.0)
4246 .ok()
4247 else {
4248 continue;
4249 };
4250 let text_object = config.objects_by_capture_ix[ix].1;
4251 let byte_range = capture.node.byte_range();
4252
4253 let mut found = false;
4254 for (range, existing) in captures.iter_mut() {
4255 if existing == &text_object {
4256 range.start = range.start.min(byte_range.start);
4257 range.end = range.end.max(byte_range.end);
4258 found = true;
4259 break;
4260 }
4261 }
4262
4263 if !found {
4264 captures.push((byte_range, text_object));
4265 }
4266 }
4267
4268 matches.advance();
4269 }
4270 })
4271 }
4272
4273 pub fn text_object_ranges<T: ToOffset>(
4274 &self,
4275 range: Range<T>,
4276 options: TreeSitterOptions,
4277 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4278 let range =
4279 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4280
4281 let mut matches =
4282 self.syntax
4283 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4284 grammar.text_object_config.as_ref().map(|c| &c.query)
4285 });
4286
4287 let configs = matches
4288 .grammars()
4289 .iter()
4290 .map(|grammar| grammar.text_object_config.as_ref())
4291 .collect::<Vec<_>>();
4292
4293 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4294
4295 iter::from_fn(move || {
4296 loop {
4297 while let Some(capture) = captures.pop() {
4298 if capture.0.overlaps(&range) {
4299 return Some(capture);
4300 }
4301 }
4302
4303 let mat = matches.peek()?;
4304
4305 let Some(config) = configs[mat.grammar_index].as_ref() else {
4306 matches.advance();
4307 continue;
4308 };
4309
4310 for capture in mat.captures {
4311 let Some(ix) = config
4312 .text_objects_by_capture_ix
4313 .binary_search_by_key(&capture.index, |e| e.0)
4314 .ok()
4315 else {
4316 continue;
4317 };
4318 let text_object = config.text_objects_by_capture_ix[ix].1;
4319 let byte_range = capture.node.byte_range();
4320
4321 let mut found = false;
4322 for (range, existing) in captures.iter_mut() {
4323 if existing == &text_object {
4324 range.start = range.start.min(byte_range.start);
4325 range.end = range.end.max(byte_range.end);
4326 found = true;
4327 break;
4328 }
4329 }
4330
4331 if !found {
4332 captures.push((byte_range, text_object));
4333 }
4334 }
4335
4336 matches.advance();
4337 }
4338 })
4339 }
4340
4341 /// Returns enclosing bracket ranges containing the given range
4342 pub fn enclosing_bracket_ranges<T: ToOffset>(
4343 &self,
4344 range: Range<T>,
4345 ) -> impl Iterator<Item = BracketMatch> + '_ {
4346 let range = range.start.to_offset(self)..range.end.to_offset(self);
4347
4348 self.bracket_ranges(range.clone()).filter(move |pair| {
4349 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4350 })
4351 }
4352
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// Can optionally pass a `range_filter` to filter the ranges of brackets to consider.
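    ///
    /// A minimal usage sketch (not a compiled doctest; assumes `snapshot` contains
    /// `"fn f() { (1 + 2) }"` and its language has a bracket query that includes
    /// parentheses and braces):
    ///
    /// ```ignore
    /// // For a position inside the parenthesized expression, the innermost pair is
    /// // the parentheses rather than the surrounding braces.
    /// let (open, close) = snapshot.innermost_enclosing_bracket_ranges(12..12, None).unwrap();
    /// assert_eq!((open, close), (9..10, 15..16));
    /// ```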
4356 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4357 &self,
4358 range: Range<T>,
4359 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4360 ) -> Option<(Range<usize>, Range<usize>)> {
4361 let range = range.start.to_offset(self)..range.end.to_offset(self);
4362
4363 // Get the ranges of the innermost pair of brackets.
4364 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4365
4366 for pair in self.enclosing_bracket_ranges(range) {
4367 if let Some(range_filter) = range_filter
4368 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4369 {
4370 continue;
4371 }
4372
4373 let len = pair.close_range.end - pair.open_range.start;
4374
4375 if let Some((existing_open, existing_close)) = &result {
4376 let existing_len = existing_close.end - existing_open.start;
4377 if len > existing_len {
4378 continue;
4379 }
4380 }
4381
4382 result = Some((pair.open_range, pair.close_range));
4383 }
4384
4385 result
4386 }
4387
4388 /// Returns anchor ranges for any matches of the redaction query.
4389 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4390 /// will be run on the relevant section of the buffer.
4391 pub fn redacted_ranges<T: ToOffset>(
4392 &self,
4393 range: Range<T>,
4394 ) -> impl Iterator<Item = Range<usize>> + '_ {
4395 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4396 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4397 grammar
4398 .redactions_config
4399 .as_ref()
4400 .map(|config| &config.query)
4401 });
4402
4403 let configs = syntax_matches
4404 .grammars()
4405 .iter()
4406 .map(|grammar| grammar.redactions_config.as_ref())
4407 .collect::<Vec<_>>();
4408
4409 iter::from_fn(move || {
4410 let redacted_range = syntax_matches
4411 .peek()
4412 .and_then(|mat| {
4413 configs[mat.grammar_index].and_then(|config| {
4414 mat.captures
4415 .iter()
4416 .find(|capture| capture.index == config.redaction_capture_ix)
4417 })
4418 })
4419 .map(|mat| mat.node.byte_range());
4420 syntax_matches.advance();
4421 redacted_range
4422 })
4423 }
4424
4425 pub fn injections_intersecting_range<T: ToOffset>(
4426 &self,
4427 range: Range<T>,
4428 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4429 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4430
4431 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4432 grammar
4433 .injection_config
4434 .as_ref()
4435 .map(|config| &config.query)
4436 });
4437
4438 let configs = syntax_matches
4439 .grammars()
4440 .iter()
4441 .map(|grammar| grammar.injection_config.as_ref())
4442 .collect::<Vec<_>>();
4443
4444 iter::from_fn(move || {
4445 let ranges = syntax_matches.peek().and_then(|mat| {
4446 let config = &configs[mat.grammar_index]?;
4447 let content_capture_range = mat.captures.iter().find_map(|capture| {
4448 if capture.index == config.content_capture_ix {
4449 Some(capture.node.byte_range())
4450 } else {
4451 None
4452 }
4453 })?;
4454 let language = self.language_at(content_capture_range.start)?;
4455 Some((content_capture_range, language))
4456 });
4457 syntax_matches.advance();
4458 ranges
4459 })
4460 }
4461
4462 pub fn runnable_ranges(
4463 &self,
4464 offset_range: Range<usize>,
4465 ) -> impl Iterator<Item = RunnableRange> + '_ {
4466 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4467 grammar.runnable_config.as_ref().map(|config| &config.query)
4468 });
4469
4470 let test_configs = syntax_matches
4471 .grammars()
4472 .iter()
4473 .map(|grammar| grammar.runnable_config.as_ref())
4474 .collect::<Vec<_>>();
4475
4476 iter::from_fn(move || {
4477 loop {
4478 let mat = syntax_matches.peek()?;
4479
4480 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4481 let mut run_range = None;
4482 let full_range = mat.captures.iter().fold(
4483 Range {
4484 start: usize::MAX,
4485 end: 0,
4486 },
4487 |mut acc, next| {
4488 let byte_range = next.node.byte_range();
4489 if acc.start > byte_range.start {
4490 acc.start = byte_range.start;
4491 }
4492 if acc.end < byte_range.end {
4493 acc.end = byte_range.end;
4494 }
4495 acc
4496 },
4497 );
4498 if full_range.start > full_range.end {
4499 // We did not find a full spanning range of this match.
4500 return None;
4501 }
4502 let extra_captures: SmallVec<[_; 1]> =
4503 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4504 test_configs
4505 .extra_captures
4506 .get(capture.index as usize)
4507 .cloned()
4508 .and_then(|tag_name| match tag_name {
4509 RunnableCapture::Named(name) => {
4510 Some((capture.node.byte_range(), name))
4511 }
4512 RunnableCapture::Run => {
4513 let _ = run_range.insert(capture.node.byte_range());
4514 None
4515 }
4516 })
4517 }));
4518 let run_range = run_range?;
4519 let tags = test_configs
4520 .query
4521 .property_settings(mat.pattern_index)
4522 .iter()
4523 .filter_map(|property| {
4524 if *property.key == *"tag" {
4525 property
4526 .value
4527 .as_ref()
4528 .map(|value| RunnableTag(value.to_string().into()))
4529 } else {
4530 None
4531 }
4532 })
4533 .collect();
4534 let extra_captures = extra_captures
4535 .into_iter()
4536 .map(|(range, name)| {
4537 (
4538 name.to_string(),
4539 self.text_for_range(range).collect::<String>(),
4540 )
4541 })
4542 .collect();
4543 // All tags should have the same range.
4544 Some(RunnableRange {
4545 run_range,
4546 full_range,
4547 runnable: Runnable {
4548 tags,
4549 language: mat.language,
4550 buffer: self.remote_id(),
4551 },
4552 extra_captures,
4553 buffer_id: self.remote_id(),
4554 })
4555 });
4556
4557 syntax_matches.advance();
4558 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()` returns `None`. But a match without
                    // a run marker shouldn't end this iterator, so loop around to the next match.
4561 return test_range;
4562 }
4563 }
4564 })
4565 }
4566
    /// Returns selections for remote peers intersecting the given range. When
    /// `include_local` is true, the local replica's selections are included as well.
4568 #[allow(clippy::type_complexity)]
4569 pub fn selections_in_range(
4570 &self,
4571 range: Range<Anchor>,
4572 include_local: bool,
4573 ) -> impl Iterator<
4574 Item = (
4575 ReplicaId,
4576 bool,
4577 CursorShape,
4578 impl Iterator<Item = &Selection<Anchor>> + '_,
4579 ),
4580 > + '_ {
4581 self.remote_selections
4582 .iter()
4583 .filter(move |(replica_id, set)| {
4584 (include_local || **replica_id != self.text.replica_id())
4585 && !set.selections.is_empty()
4586 })
4587 .map(move |(replica_id, set)| {
4588 let start_ix = match set.selections.binary_search_by(|probe| {
4589 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4590 }) {
4591 Ok(ix) | Err(ix) => ix,
4592 };
4593 let end_ix = match set.selections.binary_search_by(|probe| {
4594 probe.start.cmp(&range.end, self).then(Ordering::Less)
4595 }) {
4596 Ok(ix) | Err(ix) => ix,
4597 };
4598
4599 (
4600 *replica_id,
4601 set.line_mode,
4602 set.cursor_shape,
4603 set.selections[start_ix..end_ix].iter(),
4604 )
4605 })
4606 }
4607
    /// Returns whether the buffer contains any diagnostics.
4609 pub fn has_diagnostics(&self) -> bool {
4610 !self.diagnostics.is_empty()
4611 }
4612
4613 /// Returns all the diagnostics intersecting the given range.
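    ///
    /// A minimal usage sketch (not a compiled doctest):
    ///
    /// ```ignore
    /// // Iterate over diagnostics in the first 100 bytes, resolving their ranges to offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..100, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```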
4614 pub fn diagnostics_in_range<'a, T, O>(
4615 &'a self,
4616 search_range: Range<T>,
4617 reversed: bool,
4618 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4619 where
4620 T: 'a + Clone + ToOffset,
4621 O: 'a + FromAnchor,
4622 {
4623 let mut iterators: Vec<_> = self
4624 .diagnostics
4625 .iter()
4626 .map(|(_, collection)| {
4627 collection
4628 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4629 .peekable()
4630 })
4631 .collect();
4632
4633 std::iter::from_fn(move || {
4634 let (next_ix, _) = iterators
4635 .iter_mut()
4636 .enumerate()
4637 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4638 .min_by(|(_, a), (_, b)| {
4639 let cmp = a
4640 .range
4641 .start
4642 .cmp(&b.range.start, self)
4643 // when range is equal, sort by diagnostic severity
4644 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4645 // and stabilize order with group_id
4646 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4647 if reversed { cmp.reverse() } else { cmp }
4648 })?;
4649 iterators[next_ix]
4650 .next()
4651 .map(
4652 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4653 diagnostic,
4654 range: FromAnchor::from_anchor(&range.start, self)
4655 ..FromAnchor::from_anchor(&range.end, self),
4656 },
4657 )
4658 })
4659 }
4660
4661 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4662 /// should be used instead.
4663 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4664 &self.diagnostics
4665 }
4666
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4670 pub fn diagnostic_groups(
4671 &self,
4672 language_server_id: Option<LanguageServerId>,
4673 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4674 let mut groups = Vec::new();
4675
4676 if let Some(language_server_id) = language_server_id {
4677 if let Ok(ix) = self
4678 .diagnostics
4679 .binary_search_by_key(&language_server_id, |e| e.0)
4680 {
4681 self.diagnostics[ix]
4682 .1
4683 .groups(language_server_id, &mut groups, self);
4684 }
4685 } else {
4686 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4687 diagnostics.groups(*language_server_id, &mut groups, self);
4688 }
4689 }
4690
4691 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4692 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4693 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4694 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4695 });
4696
4697 groups
4698 }
4699
4700 /// Returns an iterator over the diagnostics for the given group.
4701 pub fn diagnostic_group<O>(
4702 &self,
4703 group_id: usize,
4704 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4705 where
4706 O: FromAnchor + 'static,
4707 {
4708 self.diagnostics
4709 .iter()
4710 .flat_map(move |(_, set)| set.group(group_id, self))
4711 }
4712
4713 /// An integer version number that accounts for all updates besides
4714 /// the buffer's text itself (which is versioned via a version vector).
4715 pub fn non_text_state_update_count(&self) -> usize {
4716 self.non_text_state_update_count
4717 }
4718
4719 /// An integer version that changes when the buffer's syntax changes.
4720 pub fn syntax_update_count(&self) -> usize {
4721 self.syntax.update_count()
4722 }
4723
    /// Returns a snapshot of the underlying file.
4725 pub fn file(&self) -> Option<&Arc<dyn File>> {
4726 self.file.as_ref()
4727 }
4728
4729 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4730 if let Some(file) = self.file() {
4731 if file.path().file_name().is_none() || include_root {
4732 Some(file.full_path(cx).to_string_lossy().into_owned())
4733 } else {
4734 Some(file.path().display(file.path_style(cx)).to_string())
4735 }
4736 } else {
4737 None
4738 }
4739 }
4740
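    /// Returns the distinct words in the given buffer offset range, keyed by their text
    /// and mapped to their anchor ranges. See [`WordsQuery`] for the available filters.
    ///
    /// A minimal usage sketch (not a compiled doctest):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: None,
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _range) in &words {
    ///     println!("completion candidate: {word}");
    /// }
    /// ```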
4741 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4742 let query_str = query.fuzzy_contents;
4743 if query_str.is_some_and(|query| query.is_empty()) {
4744 return BTreeMap::default();
4745 }
4746
4747 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4748 language,
4749 override_id: None,
4750 }));
4751
4752 let mut query_ix = 0;
4753 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4754 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4755
4756 let mut words = BTreeMap::default();
4757 let mut current_word_start_ix = None;
4758 let mut chunk_ix = query.range.start;
4759 for chunk in self.chunks(query.range, false) {
4760 for (i, c) in chunk.text.char_indices() {
4761 let ix = chunk_ix + i;
4762 if classifier.is_word(c) {
4763 if current_word_start_ix.is_none() {
4764 current_word_start_ix = Some(ix);
4765 }
4766
4767 if let Some(query_chars) = &query_chars
4768 && query_ix < query_len
4769 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4770 {
4771 query_ix += 1;
4772 }
4773 continue;
4774 } else if let Some(word_start) = current_word_start_ix.take()
4775 && query_ix == query_len
4776 {
4777 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4778 let mut word_text = self.text_for_range(word_start..ix).peekable();
4779 let first_char = word_text
4780 .peek()
4781 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4783 if !query.skip_digits
4784 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4785 {
4786 words.insert(word_text.collect(), word_range);
4787 }
4788 }
4789 query_ix = 0;
4790 }
4791 chunk_ix += chunk.text.len();
4792 }
4793
4794 words
4795 }
4796}
4797
4798pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this string as an in-order,
    /// case-insensitive subsequence.
4800 pub fuzzy_contents: Option<&'a str>,
4801 /// Skips words that start with a digit.
4802 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4804 pub range: Range<usize>,
4805}
4806
4807fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4808 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4809}
4810
4811fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4812 let mut result = IndentSize::spaces(0);
4813 for c in text {
4814 let kind = match c {
4815 ' ' => IndentKind::Space,
4816 '\t' => IndentKind::Tab,
4817 _ => break,
4818 };
4819 if result.len == 0 {
4820 result.kind = kind;
4821 }
4822 result.len += 1;
4823 }
4824 result
4825}
4826
4827impl Clone for BufferSnapshot {
4828 fn clone(&self) -> Self {
4829 Self {
4830 text: self.text.clone(),
4831 syntax: self.syntax.clone(),
4832 file: self.file.clone(),
4833 remote_selections: self.remote_selections.clone(),
4834 diagnostics: self.diagnostics.clone(),
4835 language: self.language.clone(),
4836 non_text_state_update_count: self.non_text_state_update_count,
4837 }
4838 }
4839}
4840
4841impl Deref for BufferSnapshot {
4842 type Target = text::BufferSnapshot;
4843
4844 fn deref(&self) -> &Self::Target {
4845 &self.text
4846 }
4847}
4848
4849unsafe impl Send for BufferChunks<'_> {}
4850
4851impl<'a> BufferChunks<'a> {
4852 pub(crate) fn new(
4853 text: &'a Rope,
4854 range: Range<usize>,
4855 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4856 diagnostics: bool,
4857 buffer_snapshot: Option<&'a BufferSnapshot>,
4858 ) -> Self {
4859 let mut highlights = None;
4860 if let Some((captures, highlight_maps)) = syntax {
4861 highlights = Some(BufferChunkHighlights {
4862 captures,
4863 next_capture: None,
4864 stack: Default::default(),
4865 highlight_maps,
4866 })
4867 }
4868
4869 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4870 let chunks = text.chunks_in_range(range.clone());
4871
4872 let mut this = BufferChunks {
4873 range,
4874 buffer_snapshot,
4875 chunks,
4876 diagnostic_endpoints,
4877 error_depth: 0,
4878 warning_depth: 0,
4879 information_depth: 0,
4880 hint_depth: 0,
4881 unnecessary_depth: 0,
4882 underline: true,
4883 highlights,
4884 };
4885 this.initialize_diagnostic_endpoints();
4886 this
4887 }
4888
    /// Seeks to the given byte range in the buffer.
4890 pub fn seek(&mut self, range: Range<usize>) {
4891 let old_range = std::mem::replace(&mut self.range, range.clone());
4892 self.chunks.set_range(self.range.clone());
4893 if let Some(highlights) = self.highlights.as_mut() {
4894 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4895 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4896 highlights
4897 .stack
4898 .retain(|(end_offset, _)| *end_offset > range.start);
4899 if let Some(capture) = &highlights.next_capture
4900 && range.start >= capture.node.start_byte()
4901 {
4902 let next_capture_end = capture.node.end_byte();
4903 if range.start < next_capture_end {
4904 highlights.stack.push((
4905 next_capture_end,
4906 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4907 ));
4908 }
4909 highlights.next_capture.take();
4910 }
4911 } else if let Some(snapshot) = self.buffer_snapshot {
4912 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4913 *highlights = BufferChunkHighlights {
4914 captures,
4915 next_capture: None,
4916 stack: Default::default(),
4917 highlight_maps,
4918 };
4919 } else {
4920 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4921 // Seeking such BufferChunks is not supported.
4922 debug_assert!(
4923 false,
4924 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4925 );
4926 }
4927
4928 highlights.captures.set_byte_range(self.range.clone());
4929 self.initialize_diagnostic_endpoints();
4930 }
4931 }
4932
4933 fn initialize_diagnostic_endpoints(&mut self) {
4934 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4935 && let Some(buffer) = self.buffer_snapshot
4936 {
4937 let mut diagnostic_endpoints = Vec::new();
4938 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4939 diagnostic_endpoints.push(DiagnosticEndpoint {
4940 offset: entry.range.start,
4941 is_start: true,
4942 severity: entry.diagnostic.severity,
4943 is_unnecessary: entry.diagnostic.is_unnecessary,
4944 underline: entry.diagnostic.underline,
4945 });
4946 diagnostic_endpoints.push(DiagnosticEndpoint {
4947 offset: entry.range.end,
4948 is_start: false,
4949 severity: entry.diagnostic.severity,
4950 is_unnecessary: entry.diagnostic.is_unnecessary,
4951 underline: entry.diagnostic.underline,
4952 });
4953 }
4954 diagnostic_endpoints
4955 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4956 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4957 self.hint_depth = 0;
4958 self.error_depth = 0;
4959 self.warning_depth = 0;
4960 self.information_depth = 0;
4961 }
4962 }
4963
4964 /// The current byte offset in the buffer.
4965 pub fn offset(&self) -> usize {
4966 self.range.start
4967 }
4968
4969 pub fn range(&self) -> Range<usize> {
4970 self.range.clone()
4971 }
4972
4973 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4974 let depth = match endpoint.severity {
4975 DiagnosticSeverity::ERROR => &mut self.error_depth,
4976 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4977 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4978 DiagnosticSeverity::HINT => &mut self.hint_depth,
4979 _ => return,
4980 };
4981 if endpoint.is_start {
4982 *depth += 1;
4983 } else {
4984 *depth -= 1;
4985 }
4986
4987 if endpoint.is_unnecessary {
4988 if endpoint.is_start {
4989 self.unnecessary_depth += 1;
4990 } else {
4991 self.unnecessary_depth -= 1;
4992 }
4993 }
4994 }
4995
4996 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4997 if self.error_depth > 0 {
4998 Some(DiagnosticSeverity::ERROR)
4999 } else if self.warning_depth > 0 {
5000 Some(DiagnosticSeverity::WARNING)
5001 } else if self.information_depth > 0 {
5002 Some(DiagnosticSeverity::INFORMATION)
5003 } else if self.hint_depth > 0 {
5004 Some(DiagnosticSeverity::HINT)
5005 } else {
5006 None
5007 }
5008 }
5009
5010 fn current_code_is_unnecessary(&self) -> bool {
5011 self.unnecessary_depth > 0
5012 }
5013}
5014
5015impl<'a> Iterator for BufferChunks<'a> {
5016 type Item = Chunk<'a>;
5017
5018 fn next(&mut self) -> Option<Self::Item> {
5019 let mut next_capture_start = usize::MAX;
5020 let mut next_diagnostic_endpoint = usize::MAX;
5021
5022 if let Some(highlights) = self.highlights.as_mut() {
5023 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5024 if *parent_capture_end <= self.range.start {
5025 highlights.stack.pop();
5026 } else {
5027 break;
5028 }
5029 }
5030
5031 if highlights.next_capture.is_none() {
5032 highlights.next_capture = highlights.captures.next();
5033 }
5034
5035 while let Some(capture) = highlights.next_capture.as_ref() {
5036 if self.range.start < capture.node.start_byte() {
5037 next_capture_start = capture.node.start_byte();
5038 break;
5039 } else {
5040 let highlight_id =
5041 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5042 highlights
5043 .stack
5044 .push((capture.node.end_byte(), highlight_id));
5045 highlights.next_capture = highlights.captures.next();
5046 }
5047 }
5048 }
5049
5050 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5051 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5052 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5053 if endpoint.offset <= self.range.start {
5054 self.update_diagnostic_depths(endpoint);
5055 diagnostic_endpoints.next();
5056 self.underline = endpoint.underline;
5057 } else {
5058 next_diagnostic_endpoint = endpoint.offset;
5059 break;
5060 }
5061 }
5062 }
5063 self.diagnostic_endpoints = diagnostic_endpoints;
5064
5065 if let Some(ChunkBitmaps {
5066 text: chunk,
5067 chars: chars_map,
5068 tabs,
5069 }) = self.chunks.peek_with_bitmaps()
5070 {
5071 let chunk_start = self.range.start;
5072 let mut chunk_end = (self.chunks.offset() + chunk.len())
5073 .min(next_capture_start)
5074 .min(next_diagnostic_endpoint);
5075 let mut highlight_id = None;
5076 if let Some(highlights) = self.highlights.as_ref()
5077 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5078 {
5079 chunk_end = chunk_end.min(*parent_capture_end);
5080 highlight_id = Some(*parent_highlight_id);
5081 }
5082 let bit_start = chunk_start - self.chunks.offset();
5083 let bit_end = chunk_end - self.chunks.offset();
5084
5085 let slice = &chunk[bit_start..bit_end];
5086
5087 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5088 let tabs = (tabs >> bit_start) & mask;
5089 let chars = (chars_map >> bit_start) & mask;
5090
5091 self.range.start = chunk_end;
5092 if self.range.start == self.chunks.offset() + chunk.len() {
5093 self.chunks.next().unwrap();
5094 }
5095
5096 Some(Chunk {
5097 text: slice,
5098 syntax_highlight_id: highlight_id,
5099 underline: self.underline,
5100 diagnostic_severity: self.current_diagnostic_severity(),
5101 is_unnecessary: self.current_code_is_unnecessary(),
5102 tabs,
5103 chars,
5104 ..Chunk::default()
5105 })
5106 } else {
5107 None
5108 }
5109 }
5110}
5111
5112impl operation_queue::Operation for Operation {
5113 fn lamport_timestamp(&self) -> clock::Lamport {
5114 match self {
5115 Operation::Buffer(_) => {
5116 unreachable!("buffer operations should never be deferred at this layer")
5117 }
5118 Operation::UpdateDiagnostics {
5119 lamport_timestamp, ..
5120 }
5121 | Operation::UpdateSelections {
5122 lamport_timestamp, ..
5123 }
5124 | Operation::UpdateCompletionTriggers {
5125 lamport_timestamp, ..
5126 }
5127 | Operation::UpdateLineEnding {
5128 lamport_timestamp, ..
5129 } => *lamport_timestamp,
5130 }
5131 }
5132}
5133
5134impl Default for Diagnostic {
5135 fn default() -> Self {
5136 Self {
5137 source: Default::default(),
5138 source_kind: DiagnosticSourceKind::Other,
5139 code: None,
5140 code_description: None,
5141 severity: DiagnosticSeverity::ERROR,
5142 message: Default::default(),
5143 markdown: None,
5144 group_id: 0,
5145 is_primary: false,
5146 is_disk_based: false,
5147 is_unnecessary: false,
5148 underline: true,
5149 data: None,
5150 }
5151 }
5152}
5153
5154impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5156 pub fn spaces(len: u32) -> Self {
5157 Self {
5158 len,
5159 kind: IndentKind::Space,
5160 }
5161 }
5162
5163 /// Returns an [`IndentSize`] representing a tab.
5164 pub fn tab() -> Self {
5165 Self {
5166 len: 1,
5167 kind: IndentKind::Tab,
5168 }
5169 }
5170
5171 /// An iterator over the characters represented by this [`IndentSize`].
5172 pub fn chars(&self) -> impl Iterator<Item = char> {
5173 iter::repeat(self.char()).take(self.len as usize)
5174 }
5175
5176 /// The character representation of this [`IndentSize`].
5177 pub fn char(&self) -> char {
5178 match self.kind {
5179 IndentKind::Space => ' ',
5180 IndentKind::Tab => '\t',
5181 }
5182 }
5183
5184 /// Consumes the current [`IndentSize`] and returns a new one that has
5185 /// been shrunk or enlarged by the given size along the given direction.
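    ///
    /// A small sketch of the behavior (not a compiled doctest):
    ///
    /// ```ignore
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(shrunk.len, 2);
    /// ```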
5186 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5187 match direction {
5188 Ordering::Less => {
5189 if self.kind == size.kind && self.len >= size.len {
5190 self.len -= size.len;
5191 }
5192 }
5193 Ordering::Equal => {}
5194 Ordering::Greater => {
5195 if self.len == 0 {
5196 self = size;
5197 } else if self.kind == size.kind {
5198 self.len += size.len;
5199 }
5200 }
5201 }
5202 self
5203 }
5204
5205 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5206 match self.kind {
5207 IndentKind::Space => self.len as usize,
5208 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5209 }
5210 }
5211}
5212
5213#[cfg(any(test, feature = "test-support"))]
5214pub struct TestFile {
5215 pub path: Arc<RelPath>,
5216 pub root_name: String,
5217 pub local_root: Option<PathBuf>,
5218}
5219
5220#[cfg(any(test, feature = "test-support"))]
5221impl File for TestFile {
5222 fn path(&self) -> &Arc<RelPath> {
5223 &self.path
5224 }
5225
5226 fn full_path(&self, _: &gpui::App) -> PathBuf {
5227 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5228 }
5229
5230 fn as_local(&self) -> Option<&dyn LocalFile> {
5231 if self.local_root.is_some() {
5232 Some(self)
5233 } else {
5234 None
5235 }
5236 }
5237
5238 fn disk_state(&self) -> DiskState {
5239 unimplemented!()
5240 }
5241
5242 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5243 self.path().file_name().unwrap_or(self.root_name.as_ref())
5244 }
5245
5246 fn worktree_id(&self, _: &App) -> WorktreeId {
5247 WorktreeId::from_usize(0)
5248 }
5249
5250 fn to_proto(&self, _: &App) -> rpc::proto::File {
5251 unimplemented!()
5252 }
5253
5254 fn is_private(&self) -> bool {
5255 false
5256 }
5257
5258 fn path_style(&self, _cx: &App) -> PathStyle {
5259 PathStyle::local()
5260 }
5261}
5262
5263#[cfg(any(test, feature = "test-support"))]
5264impl LocalFile for TestFile {
5265 fn abs_path(&self, _cx: &App) -> PathBuf {
5266 PathBuf::from(self.local_root.as_ref().unwrap())
5267 .join(&self.root_name)
5268 .join(self.path.as_std_path())
5269 }
5270
5271 fn load(
5272 &self,
5273 _cx: &App,
5274 _encoding: Encoding,
5275 _force: bool,
5276 _detect_utf16: bool,
5277 _buffer_encoding: Option<Arc<Encoding>>,
5278 ) -> Task<Result<String>> {
5279 unimplemented!()
5280 }
5281
5282 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5283 unimplemented!()
5284 }
5285}
5286
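/// Groups an ascending sequence of values (typically row numbers) into contiguous
/// ranges, capping each range at `max_len` elements.
///
/// For example (a sketch, not a compiled doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```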
5287pub(crate) fn contiguous_ranges(
5288 values: impl Iterator<Item = u32>,
5289 max_len: usize,
5290) -> impl Iterator<Item = Range<u32>> {
5291 let mut values = values;
5292 let mut current_range: Option<Range<u32>> = None;
5293 std::iter::from_fn(move || {
5294 loop {
5295 if let Some(value) = values.next() {
5296 if let Some(range) = &mut current_range
5297 && value == range.end
5298 && range.len() < max_len
5299 {
5300 range.end += 1;
5301 continue;
5302 }
5303
5304 let prev_range = current_range.clone();
5305 current_range = Some(value..(value + 1));
5306 if prev_range.is_some() {
5307 return prev_range;
5308 }
5309 } else {
5310 return current_range.take();
5311 }
5312 }
5313 })
5314}
5315
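/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// taking the word characters of a [`LanguageScope`] into account.
///
/// A minimal sketch of the default behavior (not a compiled doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```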
5316#[derive(Default, Debug)]
5317pub struct CharClassifier {
5318 scope: Option<LanguageScope>,
5319 scope_context: Option<CharScopeContext>,
5320 ignore_punctuation: bool,
5321}
5322
5323impl CharClassifier {
5324 pub fn new(scope: Option<LanguageScope>) -> Self {
5325 Self {
5326 scope,
5327 scope_context: None,
5328 ignore_punctuation: false,
5329 }
5330 }
5331
5332 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5333 Self {
5334 scope_context,
5335 ..self
5336 }
5337 }
5338
5339 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5340 Self {
5341 ignore_punctuation,
5342 ..self
5343 }
5344 }
5345
5346 pub fn is_whitespace(&self, c: char) -> bool {
5347 self.kind(c) == CharKind::Whitespace
5348 }
5349
5350 pub fn is_word(&self, c: char) -> bool {
5351 self.kind(c) == CharKind::Word
5352 }
5353
5354 pub fn is_punctuation(&self, c: char) -> bool {
5355 self.kind(c) == CharKind::Punctuation
5356 }
5357
5358 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5359 if c.is_alphanumeric() || c == '_' {
5360 return CharKind::Word;
5361 }
5362
5363 if let Some(scope) = &self.scope {
5364 let characters = match self.scope_context {
5365 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5366 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5367 None => scope.word_characters(),
5368 };
5369 if let Some(characters) = characters
5370 && characters.contains(&c)
5371 {
5372 return CharKind::Word;
5373 }
5374 }
5375
5376 if c.is_whitespace() {
5377 return CharKind::Whitespace;
5378 }
5379
5380 if ignore_punctuation {
5381 CharKind::Word
5382 } else {
5383 CharKind::Punctuation
5384 }
5385 }
5386
5387 pub fn kind(&self, c: char) -> CharKind {
5388 self.kind_with(c, self.ignore_punctuation)
5389 }
5390}
5391
5392/// Find all of the ranges of whitespace that occur at the ends of lines
5393/// in the given rope.
5394///
5395/// This could also be done with a regex search, but this implementation
5396/// avoids copying text.
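///
/// For example (a sketch, not a compiled doctest):
///
/// ```ignore
/// let rope = Rope::from("foo \nbar\t\t\nbaz");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..4, 8..10]);
/// ```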
5397pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5398 let mut ranges = Vec::new();
5399
5400 let mut offset = 0;
5401 let mut prev_chunk_trailing_whitespace_range = 0..0;
5402 for chunk in rope.chunks() {
5403 let mut prev_line_trailing_whitespace_range = 0..0;
5404 for (i, line) in chunk.split('\n').enumerate() {
5405 let line_end_offset = offset + line.len();
5406 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5407 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5408
5409 if i == 0 && trimmed_line_len == 0 {
5410 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5411 }
5412 if !prev_line_trailing_whitespace_range.is_empty() {
5413 ranges.push(prev_line_trailing_whitespace_range);
5414 }
5415
5416 offset = line_end_offset + 1;
5417 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5418 }
5419
5420 offset -= 1;
5421 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5422 }
5423
5424 if !prev_chunk_trailing_whitespace_range.is_empty() {
5425 ranges.push(prev_chunk_trailing_whitespace_range);
5426 }
5427
5428 ranges
5429}