1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::{MTime, encodings::EncodingWrapper};
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: Arc<std::sync::Mutex<&'static Encoding>>,
131 pub observe_file_encoding: Option<gpui::Subscription>,
132}
133
134#[derive(Copy, Clone, Debug, PartialEq, Eq)]
135pub enum ParseStatus {
136 Idle,
137 Parsing,
138}
139
140struct BufferBranchState {
141 base_buffer: Entity<Buffer>,
142 merged_operations: Vec<Lamport>,
143}
144
145/// An immutable, cheaply cloneable representation of a fixed
146/// state of a buffer.
147pub struct BufferSnapshot {
148 pub text: text::BufferSnapshot,
149 pub syntax: SyntaxSnapshot,
150 file: Option<Arc<dyn File>>,
151 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
152 remote_selections: TreeMap<ReplicaId, SelectionSet>,
153 language: Option<Arc<Language>>,
154 non_text_state_update_count: usize,
155}
156
157/// The kind and amount of indentation in a particular line. For now,
158/// assumes that indentation is all the same character.
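///
/// For instance, a line indented with four spaces would be described as follows
/// (illustrative sketch, not compiled as a doctest):
/// ```ignore
/// let size = IndentSize { len: 4, kind: IndentKind::Space };
/// assert_eq!(size.len, 4);
/// ```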
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
160pub struct IndentSize {
161 /// The number of bytes that comprise the indentation.
162 pub len: u32,
163 /// The kind of whitespace used for indentation.
164 pub kind: IndentKind,
165}
166
167/// A whitespace character that's used for indentation.
168#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
169pub enum IndentKind {
170 /// An ASCII space character.
171 #[default]
172 Space,
173 /// An ASCII tab character.
174 Tab,
175}
176
177/// The shape of a selection cursor.
178#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191impl From<settings::CursorShape> for CursorShape {
192 fn from(shape: settings::CursorShape) -> Self {
193 match shape {
194 settings::CursorShape::Bar => CursorShape::Bar,
195 settings::CursorShape::Block => CursorShape::Block,
196 settings::CursorShape::Underline => CursorShape::Underline,
197 settings::CursorShape::Hollow => CursorShape::Hollow,
198 }
199 }
200}
201
202#[derive(Clone, Debug)]
203struct SelectionSet {
204 line_mode: bool,
205 cursor_shape: CursorShape,
206 selections: Arc<[Selection<Anchor>]>,
207 lamport_timestamp: clock::Lamport,
208}
209
210/// A diagnostic associated with a certain range of a buffer.
211#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
212pub struct Diagnostic {
213 /// The name of the service that produced this diagnostic.
214 pub source: Option<String>,
215 /// A machine-readable code that identifies this diagnostic.
216 pub code: Option<NumberOrString>,
217 pub code_description: Option<lsp::Uri>,
218 /// Whether this diagnostic is a hint, warning, or error.
219 pub severity: DiagnosticSeverity,
220 /// The human-readable message associated with this diagnostic.
221 pub message: String,
    /// The human-readable message, in Markdown format, if available.
223 pub markdown: Option<String>,
224 /// An id that identifies the group to which this diagnostic belongs.
225 ///
226 /// When a language server produces a diagnostic with
227 /// one or more associated diagnostics, those diagnostics are all
228 /// assigned a single group ID.
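    ///
    /// Illustrative sketch (assumes a pre-existing `base` diagnostic from which the
    /// remaining fields are copied; not compiled as a doctest):
    /// ```ignore
    /// let primary = Diagnostic { group_id: 3, is_primary: true, ..base.clone() };
    /// let related = Diagnostic { group_id: 3, is_primary: false, ..base };
    /// ```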
229 pub group_id: usize,
230 /// Whether this diagnostic is the primary diagnostic for its group.
231 ///
232 /// In a given group, the primary diagnostic is the top-level diagnostic
233 /// returned by the language server. The non-primary diagnostics are the
234 /// associated diagnostics.
235 pub is_primary: bool,
236 /// Whether this diagnostic is considered to originate from an analysis of
237 /// files on disk, as opposed to any unsaved buffer contents. This is a
238 /// property of a given diagnostic source, and is configured for a given
239 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
240 /// for the language server.
241 pub is_disk_based: bool,
242 /// Whether this diagnostic marks unnecessary code.
243 pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly distinguished by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when code actions are requested for this diagnostic.
    pub data: Option<Value>,
248 /// Whether to underline the corresponding text range in the editor.
249 pub underline: bool,
250}
251
252#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
253pub enum DiagnosticSourceKind {
254 Pulled,
255 Pushed,
256 Other,
257}
258
259/// An operation used to synchronize this buffer with its other replicas.
260#[derive(Clone, Debug, PartialEq)]
261pub enum Operation {
262 /// A text operation.
263 Buffer(text::Operation),
264
265 /// An update to the buffer's diagnostics.
266 UpdateDiagnostics {
267 /// The id of the language server that produced the new diagnostics.
268 server_id: LanguageServerId,
269 /// The diagnostics.
270 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
271 /// The buffer's lamport timestamp.
272 lamport_timestamp: clock::Lamport,
273 },
274
275 /// An update to the most recent selections in this buffer.
276 UpdateSelections {
277 /// The selections.
278 selections: Arc<[Selection<Anchor>]>,
279 /// The buffer's lamport timestamp.
280 lamport_timestamp: clock::Lamport,
281 /// Whether the selections are in 'line mode'.
282 line_mode: bool,
283 /// The [`CursorShape`] associated with these selections.
284 cursor_shape: CursorShape,
285 },
286
287 /// An update to the characters that should trigger autocompletion
288 /// for this buffer.
289 UpdateCompletionTriggers {
290 /// The characters that trigger autocompletion.
291 triggers: Vec<String>,
292 /// The buffer's lamport timestamp.
293 lamport_timestamp: clock::Lamport,
294 /// The language server ID.
295 server_id: LanguageServerId,
296 },
297
298 /// An update to the line ending type of this buffer.
299 UpdateLineEnding {
300 /// The line ending type.
301 line_ending: LineEnding,
302 /// The buffer's lamport timestamp.
303 lamport_timestamp: clock::Lamport,
304 },
305}
306
307/// An event that occurs in a buffer.
308#[derive(Clone, Debug, PartialEq)]
309pub enum BufferEvent {
310 /// The buffer was changed in a way that must be
311 /// propagated to its other replicas.
312 Operation {
313 operation: Operation,
314 is_local: bool,
315 },
316 /// The buffer was edited.
317 Edited,
318 /// The buffer's `dirty` bit changed.
319 DirtyChanged,
320 /// The buffer was saved.
321 Saved,
322 /// The buffer's file was changed on disk.
323 FileHandleChanged,
324 /// The buffer was reloaded.
325 Reloaded,
    /// The buffer needs to be reloaded.
327 ReloadNeeded,
328 /// The buffer's language was changed.
329 LanguageChanged,
330 /// The buffer's syntax trees were updated.
331 Reparsed,
332 /// The buffer's diagnostics were updated.
333 DiagnosticsUpdated,
334 /// The buffer gained or lost editing capabilities.
335 CapabilityChanged,
336}
337
338/// The file associated with a buffer.
339pub trait File: Send + Sync + Any {
340 /// Returns the [`LocalFile`] associated with this file, if the
341 /// file is local.
342 fn as_local(&self) -> Option<&dyn LocalFile>;
343
344 /// Returns whether this file is local.
345 fn is_local(&self) -> bool {
346 self.as_local().is_some()
347 }
348
349 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
350 /// only available in some states, such as modification time.
351 fn disk_state(&self) -> DiskState;
352
353 /// Returns the path of this file relative to the worktree's root directory.
354 fn path(&self) -> &Arc<RelPath>;
355
356 /// Returns the path of this file relative to the worktree's parent directory (this means it
357 /// includes the name of the worktree's root folder).
358 fn full_path(&self, cx: &App) -> PathBuf;
359
360 /// Returns the path style of this file.
361 fn path_style(&self, cx: &App) -> PathStyle;
362
363 /// Returns the last component of this handle's absolute path. If this handle refers to the root
364 /// of its worktree, then this method will return the name of the worktree itself.
365 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
366
367 /// Returns the id of the worktree to which this file belongs.
368 ///
369 /// This is needed for looking up project-specific settings.
370 fn worktree_id(&self, cx: &App) -> WorktreeId;
371
372 /// Converts this file into a protobuf message.
373 fn to_proto(&self, cx: &App) -> rpc::proto::File;
374
    /// Returns whether Zed considers this to be a private file.
376 fn is_private(&self) -> bool;
377
378 fn encoding(&self) -> Option<Arc<std::sync::Mutex<&'static Encoding>>> {
379 unimplemented!()
380 }
381}
382
383/// The file's storage status - whether it's stored (`Present`), and if so when it was last
384/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
385/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
386/// indicator for new files.
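///
/// A small sketch of how these states behave (not compiled as a doctest):
/// ```ignore
/// assert!(DiskState::New.mtime().is_none());
/// assert!(!DiskState::Deleted.exists());
/// ```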
387#[derive(Copy, Clone, Debug, PartialEq)]
388pub enum DiskState {
389 /// File created in Zed that has not been saved.
390 New,
391 /// File present on the filesystem.
392 Present { mtime: MTime },
393 /// Deleted file that was previously present.
394 Deleted,
395}
396
397impl DiskState {
398 /// Returns the file's last known modification time on disk.
399 pub fn mtime(self) -> Option<MTime> {
400 match self {
401 DiskState::New => None,
402 DiskState::Present { mtime } => Some(mtime),
403 DiskState::Deleted => None,
404 }
405 }
406
407 pub fn exists(&self) -> bool {
408 match self {
409 DiskState::New => false,
410 DiskState::Present { .. } => true,
411 DiskState::Deleted => false,
412 }
413 }
414}
415
416/// The file associated with a buffer, in the case where the file is on the local disk.
417pub trait LocalFile: File {
    /// Returns the absolute path of this file.
419 fn abs_path(&self, cx: &App) -> PathBuf;
420
421 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
422 fn load(
423 &self,
424 cx: &App,
425 encoding: EncodingWrapper,
426 force: bool,
427 detect_utf16: bool,
428 buffer_encoding: Option<Arc<std::sync::Mutex<&'static Encoding>>>,
429 ) -> Task<Result<String>>;
430
    /// Loads the file's contents from disk as raw bytes.
432 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
433}
434
435/// The auto-indent behavior associated with an editing operation.
436/// For some editing operations, each affected line of text has its
437/// indentation recomputed. For other operations, the entire block
438/// of edited text is adjusted uniformly.
439#[derive(Clone, Debug)]
440pub enum AutoindentMode {
441 /// Indent each line of inserted text.
442 EachLine,
443 /// Apply the same indentation adjustment to all of the lines
444 /// in a given insertion.
445 Block {
446 /// The original indentation column of the first line of each
447 /// insertion, if it has been copied.
448 ///
449 /// Knowing this makes it possible to preserve the relative indentation
450 /// of every line in the insertion from when it was copied.
451 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every subsequent line of the
        /// insertion is shifted by `b - a` columns.
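        ///
        /// Illustrative sketch for a single pasted block whose first line was
        /// originally at indent column 4 (not compiled as a doctest):
        /// ```ignore
        /// let mode = AutoindentMode::Block {
        ///     original_indent_columns: vec![Some(4)],
        /// };
        /// ```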
455 original_indent_columns: Vec<Option<u32>>,
456 },
457}
458
459#[derive(Clone)]
460struct AutoindentRequest {
461 before_edit: BufferSnapshot,
462 entries: Vec<AutoindentRequestEntry>,
463 is_block_mode: bool,
464 ignore_empty_lines: bool,
465}
466
467#[derive(Debug, Clone)]
468struct AutoindentRequestEntry {
469 /// A range of the buffer whose indentation should be adjusted.
470 range: Range<Anchor>,
471 /// Whether or not these lines should be considered brand new, for the
472 /// purpose of auto-indent. When text is not new, its indentation will
473 /// only be adjusted if the suggested indentation level has *changed*
474 /// since the edit was made.
475 first_line_is_new: bool,
476 indent_size: IndentSize,
477 original_indent_column: Option<u32>,
478}
479
480#[derive(Debug)]
481struct IndentSuggestion {
482 basis_row: u32,
483 delta: Ordering,
484 within_error: bool,
485}
486
487struct BufferChunkHighlights<'a> {
488 captures: SyntaxMapCaptures<'a>,
489 next_capture: Option<SyntaxMapCapture<'a>>,
490 stack: Vec<(usize, HighlightId)>,
491 highlight_maps: Vec<HighlightMap>,
492}
493
494/// An iterator that yields chunks of a buffer's text, along with their
495/// syntax highlights and diagnostic status.
496pub struct BufferChunks<'a> {
497 buffer_snapshot: Option<&'a BufferSnapshot>,
498 range: Range<usize>,
499 chunks: text::Chunks<'a>,
500 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
501 error_depth: usize,
502 warning_depth: usize,
503 information_depth: usize,
504 hint_depth: usize,
505 unnecessary_depth: usize,
506 underline: bool,
507 highlights: Option<BufferChunkHighlights<'a>>,
508}
509
510/// A chunk of a buffer's text, along with its syntax highlight and
511/// diagnostic status.
512#[derive(Clone, Debug, Default)]
513pub struct Chunk<'a> {
514 /// The text of the chunk.
515 pub text: &'a str,
516 /// The syntax highlighting style of the chunk.
517 pub syntax_highlight_id: Option<HighlightId>,
518 /// The highlight style that has been applied to this chunk in
519 /// the editor.
520 pub highlight_style: Option<HighlightStyle>,
521 /// The severity of diagnostic associated with this chunk, if any.
522 pub diagnostic_severity: Option<DiagnosticSeverity>,
523 /// A bitset of which characters are tabs in this string.
524 pub tabs: u128,
525 /// Bitmap of character indices in this chunk
526 pub chars: u128,
527 /// Whether this chunk of text is marked as unnecessary.
528 pub is_unnecessary: bool,
529 /// Whether this chunk of text was originally a tab character.
530 pub is_tab: bool,
531 /// Whether this chunk of text was originally an inlay.
532 pub is_inlay: bool,
533 /// Whether to underline the corresponding text range in the editor.
534 pub underline: bool,
535}
536
537/// A set of edits to a given version of a buffer, computed asynchronously.
538#[derive(Debug)]
539pub struct Diff {
540 pub base_version: clock::Global,
541 pub line_ending: LineEnding,
542 pub edits: Vec<(Range<usize>, Arc<str>)>,
543}
544
545#[derive(Debug, Clone, Copy)]
546pub(crate) struct DiagnosticEndpoint {
547 offset: usize,
548 is_start: bool,
549 underline: bool,
550 severity: DiagnosticSeverity,
551 is_unnecessary: bool,
552}
553
554/// A class of characters, used for characterizing a run of text.
555#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
556pub enum CharKind {
557 /// Whitespace.
558 Whitespace,
559 /// Punctuation.
560 Punctuation,
561 /// Word.
562 Word,
563}
564
565/// Context for character classification within a specific scope.
566#[derive(Copy, Clone, Eq, PartialEq, Debug)]
567pub enum CharScopeContext {
568 /// Character classification for completion queries.
569 ///
570 /// This context treats certain characters as word constituents that would
571 /// normally be considered punctuation, such as '-' in Tailwind classes
572 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
573 Completion,
574 /// Character classification for linked edits.
575 ///
576 /// This context handles characters that should be treated as part of
577 /// identifiers during linked editing operations, such as '.' in JSX
578 /// component names like `<Animated.View>`.
579 LinkedEdit,
580}
581
/// A runnable is a set of data about a region that can be resolved into a task.
583pub struct Runnable {
584 pub tags: SmallVec<[RunnableTag; 1]>,
585 pub language: Arc<Language>,
586 pub buffer: BufferId,
587}
588
589#[derive(Default, Clone, Debug)]
590pub struct HighlightedText {
591 pub text: SharedString,
592 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
593}
594
595#[derive(Default, Debug)]
596struct HighlightedTextBuilder {
597 pub text: String,
598 highlights: Vec<(Range<usize>, HighlightStyle)>,
599}
600
601impl HighlightedText {
602 pub fn from_buffer_range<T: ToOffset>(
603 range: Range<T>,
604 snapshot: &text::BufferSnapshot,
605 syntax_snapshot: &SyntaxSnapshot,
606 override_style: Option<HighlightStyle>,
607 syntax_theme: &SyntaxTheme,
608 ) -> Self {
609 let mut highlighted_text = HighlightedTextBuilder::default();
610 highlighted_text.add_text_from_buffer_range(
611 range,
612 snapshot,
613 syntax_snapshot,
614 override_style,
615 syntax_theme,
616 );
617 highlighted_text.build()
618 }
619
620 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
621 gpui::StyledText::new(self.text.clone())
622 .with_default_highlights(default_style, self.highlights.iter().cloned())
623 }
624
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins inside it, along with a boolean indicating whether more lines follow.
627 pub fn first_line_preview(self) -> (Self, bool) {
628 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
629 let first_line = &self.text[..newline_ix];
630
631 // Trim leading whitespace, unless an edit starts prior to it.
632 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
633 if let Some((first_highlight_range, _)) = self.highlights.first() {
634 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
635 }
636
637 let preview_text = &first_line[preview_start_ix..];
638 let preview_highlights = self
639 .highlights
640 .into_iter()
641 .skip_while(|(range, _)| range.end <= preview_start_ix)
642 .take_while(|(range, _)| range.start < newline_ix)
643 .filter_map(|(mut range, highlight)| {
644 range.start = range.start.saturating_sub(preview_start_ix);
645 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
646 if range.is_empty() {
647 None
648 } else {
649 Some((range, highlight))
650 }
651 });
652
653 let preview = Self {
654 text: SharedString::new(preview_text),
655 highlights: preview_highlights.collect(),
656 };
657
658 (preview, self.text.len() > newline_ix)
659 }
660}
661
662impl HighlightedTextBuilder {
663 pub fn build(self) -> HighlightedText {
664 HighlightedText {
665 text: self.text.into(),
666 highlights: self.highlights,
667 }
668 }
669
670 pub fn add_text_from_buffer_range<T: ToOffset>(
671 &mut self,
672 range: Range<T>,
673 snapshot: &text::BufferSnapshot,
674 syntax_snapshot: &SyntaxSnapshot,
675 override_style: Option<HighlightStyle>,
676 syntax_theme: &SyntaxTheme,
677 ) {
678 let range = range.to_offset(snapshot);
679 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
680 let start = self.text.len();
681 self.text.push_str(chunk.text);
682 let end = self.text.len();
683
684 if let Some(highlight_style) = chunk
685 .syntax_highlight_id
686 .and_then(|id| id.style(syntax_theme))
687 {
688 let highlight_style = override_style.map_or(highlight_style, |override_style| {
689 highlight_style.highlight(override_style)
690 });
691 self.highlights.push((start..end, highlight_style));
692 } else if let Some(override_style) = override_style {
693 self.highlights.push((start..end, override_style));
694 }
695 }
696 }
697
698 fn highlighted_chunks<'a>(
699 range: Range<usize>,
700 snapshot: &'a text::BufferSnapshot,
701 syntax_snapshot: &'a SyntaxSnapshot,
702 ) -> BufferChunks<'a> {
703 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
704 grammar
705 .highlights_config
706 .as_ref()
707 .map(|config| &config.query)
708 });
709
710 let highlight_maps = captures
711 .grammars()
712 .iter()
713 .map(|grammar| grammar.highlight_map())
714 .collect();
715
716 BufferChunks::new(
717 snapshot.as_rope(),
718 range,
719 Some((captures, highlight_maps)),
720 false,
721 None,
722 )
723 }
724}
725
726#[derive(Clone)]
727pub struct EditPreview {
728 old_snapshot: text::BufferSnapshot,
729 applied_edits_snapshot: text::BufferSnapshot,
730 syntax_snapshot: SyntaxSnapshot,
731}
732
733impl EditPreview {
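    /// A rough usage sketch (assumes an [`EditPreview`] obtained from
    /// [`Buffer::preview_edits`] and surrounding `gpui` state; not compiled as a doctest):
    /// ```ignore
    /// let highlighted = edit_preview.highlight_edits(&buffer_snapshot, &edits, true, cx);
    /// let styled = highlighted.to_styled_text(&text_style);
    /// ```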
734 pub fn highlight_edits(
735 &self,
736 current_snapshot: &BufferSnapshot,
737 edits: &[(Range<Anchor>, String)],
738 include_deletions: bool,
739 cx: &App,
740 ) -> HighlightedText {
741 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
742 return HighlightedText::default();
743 };
744
745 let mut highlighted_text = HighlightedTextBuilder::default();
746
747 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
748
749 let insertion_highlight_style = HighlightStyle {
750 background_color: Some(cx.theme().status().created_background),
751 ..Default::default()
752 };
753 let deletion_highlight_style = HighlightStyle {
754 background_color: Some(cx.theme().status().deleted_background),
755 ..Default::default()
756 };
757 let syntax_theme = cx.theme().syntax();
758
759 for (range, edit_text) in edits {
760 let edit_new_end_in_preview_snapshot = range
761 .end
762 .bias_right(&self.old_snapshot)
763 .to_offset(&self.applied_edits_snapshot);
764 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
765
766 let unchanged_range_in_preview_snapshot =
767 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
768 if !unchanged_range_in_preview_snapshot.is_empty() {
769 highlighted_text.add_text_from_buffer_range(
770 unchanged_range_in_preview_snapshot,
771 &self.applied_edits_snapshot,
772 &self.syntax_snapshot,
773 None,
774 syntax_theme,
775 );
776 }
777
778 let range_in_current_snapshot = range.to_offset(current_snapshot);
779 if include_deletions && !range_in_current_snapshot.is_empty() {
780 highlighted_text.add_text_from_buffer_range(
781 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
784 Some(deletion_highlight_style),
785 syntax_theme,
786 );
787 }
788
789 if !edit_text.is_empty() {
790 highlighted_text.add_text_from_buffer_range(
791 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
792 &self.applied_edits_snapshot,
793 &self.syntax_snapshot,
794 Some(insertion_highlight_style),
795 syntax_theme,
796 );
797 }
798
799 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
800 }
801
802 highlighted_text.add_text_from_buffer_range(
803 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
804 &self.applied_edits_snapshot,
805 &self.syntax_snapshot,
806 None,
807 syntax_theme,
808 );
809
810 highlighted_text.build()
811 }
812
813 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
814 let (first, _) = edits.first()?;
815 let (last, _) = edits.last()?;
816
817 let start = first
818 .start
819 .bias_left(&self.old_snapshot)
820 .to_point(&self.applied_edits_snapshot);
821 let end = last
822 .end
823 .bias_right(&self.old_snapshot)
824 .to_point(&self.applied_edits_snapshot);
825
826 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
827 let range = Point::new(start.row, 0)
828 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
829
830 Some(range.to_offset(&self.applied_edits_snapshot))
831 }
832}
833
834#[derive(Clone, Debug, PartialEq, Eq)]
835pub struct BracketMatch {
836 pub open_range: Range<usize>,
837 pub close_range: Range<usize>,
838 pub newline_only: bool,
839}
840
841impl Buffer {
842 /// Create a new buffer with the given base text.
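    ///
    /// A minimal usage sketch (assumes a `gpui` entity context, as in tests; not
    /// compiled as a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```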
843 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
844 Self::build(
845 TextBuffer::new(
846 ReplicaId::LOCAL,
847 cx.entity_id().as_non_zero_u64().into(),
848 base_text.into(),
849 &cx.background_executor(),
850 ),
851 None,
852 Capability::ReadWrite,
853 )
854 }
855
    /// Replaces the underlying text buffer. In contrast to `set_text`, this does not
    /// change the buffer's editing state.
858 pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
859 self.text = new;
860 self.saved_version = self.version.clone();
861 self.has_unsaved_edits.set((self.version.clone(), false));
862
863 self.was_changed();
864 cx.emit(BufferEvent::DirtyChanged);
865 cx.notify();
866 }
867
868 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
869 pub fn local_normalized(
870 base_text_normalized: Rope,
871 line_ending: LineEnding,
872 cx: &Context<Self>,
873 ) -> Self {
874 Self::build(
875 TextBuffer::new_normalized(
876 ReplicaId::LOCAL,
877 cx.entity_id().as_non_zero_u64().into(),
878 line_ending,
879 base_text_normalized,
880 ),
881 None,
882 Capability::ReadWrite,
883 )
884 }
885
886 /// Create a new buffer that is a replica of a remote buffer.
887 pub fn remote(
888 remote_id: BufferId,
889 replica_id: ReplicaId,
890 capability: Capability,
891 base_text: impl Into<String>,
892 cx: &BackgroundExecutor,
893 ) -> Self {
894 Self::build(
895 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
896 None,
897 capability,
898 )
899 }
900
901 /// Create a new buffer that is a replica of a remote buffer, populating its
902 /// state from the given protobuf message.
903 pub fn from_proto(
904 replica_id: ReplicaId,
905 capability: Capability,
906 message: proto::BufferState,
907 file: Option<Arc<dyn File>>,
908 cx: &BackgroundExecutor,
909 ) -> Result<Self> {
910 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
911 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
912 let mut this = Self::build(buffer, file, capability);
913 this.text.set_line_ending(proto::deserialize_line_ending(
914 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
915 ));
916 this.saved_version = proto::deserialize_version(&message.saved_version);
917 this.saved_mtime = message.saved_mtime.map(|time| time.into());
918 Ok(this)
919 }
920
921 /// Serialize the buffer's state to a protobuf message.
922 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
923 proto::BufferState {
924 id: self.remote_id().into(),
925 file: self.file.as_ref().map(|f| f.to_proto(cx)),
926 base_text: self.base_text().to_string(),
927 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
928 saved_version: proto::serialize_version(&self.saved_version),
929 saved_mtime: self.saved_mtime.map(|time| time.into()),
930 }
931 }
932
933 /// Serialize as protobufs all of the changes to the buffer since the given version.
934 pub fn serialize_ops(
935 &self,
936 since: Option<clock::Global>,
937 cx: &App,
938 ) -> Task<Vec<proto::Operation>> {
939 let mut operations = Vec::new();
940 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
941
942 operations.extend(self.remote_selections.iter().map(|(_, set)| {
943 proto::serialize_operation(&Operation::UpdateSelections {
944 selections: set.selections.clone(),
945 lamport_timestamp: set.lamport_timestamp,
946 line_mode: set.line_mode,
947 cursor_shape: set.cursor_shape,
948 })
949 }));
950
951 for (server_id, diagnostics) in &self.diagnostics {
952 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
953 lamport_timestamp: self.diagnostics_timestamp,
954 server_id: *server_id,
955 diagnostics: diagnostics.iter().cloned().collect(),
956 }));
957 }
958
959 for (server_id, completions) in &self.completion_triggers_per_language_server {
960 operations.push(proto::serialize_operation(
961 &Operation::UpdateCompletionTriggers {
962 triggers: completions.iter().cloned().collect(),
963 lamport_timestamp: self.completion_triggers_timestamp,
964 server_id: *server_id,
965 },
966 ));
967 }
968
969 let text_operations = self.text.operations().clone();
970 cx.background_spawn(async move {
971 let since = since.unwrap_or_default();
972 operations.extend(
973 text_operations
974 .iter()
975 .filter(|(_, op)| !since.observed(op.timestamp()))
976 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
977 );
978 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
979 operations
980 })
981 }
982
983 /// Assign a language to the buffer, returning the buffer.
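    ///
    /// Usage sketch (the `rust_language` handle is assumed to come from elsewhere,
    /// e.g. a [`LanguageRegistry`]; not compiled as a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```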
984 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
985 self.set_language(Some(language), cx);
986 self
987 }
988
989 /// Returns the [`Capability`] of this buffer.
990 pub fn capability(&self) -> Capability {
991 self.capability
992 }
993
994 /// Whether this buffer can only be read.
995 pub fn read_only(&self) -> bool {
996 self.capability == Capability::ReadOnly
997 }
998
999 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1000 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1001 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1002 let snapshot = buffer.snapshot();
1003 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1004 Self {
1005 saved_mtime,
1006 saved_version: buffer.version(),
1007 preview_version: buffer.version(),
1008 reload_task: None,
1009 transaction_depth: 0,
1010 was_dirty_before_starting_transaction: None,
1011 has_unsaved_edits: Cell::new((buffer.version(), false)),
1012 text: buffer,
1013 branch_state: None,
1014 file,
1015 capability,
1016 syntax_map,
1017 reparse: None,
1018 non_text_state_update_count: 0,
1019 sync_parse_timeout: Duration::from_millis(1),
1020 parse_status: watch::channel(ParseStatus::Idle),
1021 autoindent_requests: Default::default(),
1022 wait_for_autoindent_txs: Default::default(),
1023 pending_autoindent: Default::default(),
1024 language: None,
1025 remote_selections: Default::default(),
1026 diagnostics: Default::default(),
1027 diagnostics_timestamp: Lamport::MIN,
1028 completion_triggers: Default::default(),
1029 completion_triggers_per_language_server: Default::default(),
1030 completion_triggers_timestamp: Lamport::MIN,
1031 deferred_ops: OperationQueue::new(),
1032 has_conflict: false,
1033 change_bits: Default::default(),
1034 _subscriptions: Vec::new(),
1035 encoding: Arc::new(std::sync::Mutex::new(encoding_rs::UTF_8)),
1036 observe_file_encoding: None,
1037 }
1038 }
1039
1040 pub fn build_snapshot(
1041 text: Rope,
1042 language: Option<Arc<Language>>,
1043 language_registry: Option<Arc<LanguageRegistry>>,
1044 cx: &mut App,
1045 ) -> impl Future<Output = BufferSnapshot> + use<> {
1046 let entity_id = cx.reserve_entity::<Self>().entity_id();
1047 let buffer_id = entity_id.as_non_zero_u64().into();
1048 async move {
1049 let text =
1050 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1051 .snapshot();
1052 let mut syntax = SyntaxMap::new(&text).snapshot();
1053 if let Some(language) = language.clone() {
1054 let language_registry = language_registry.clone();
1055 syntax.reparse(&text, language_registry, language);
1056 }
1057 BufferSnapshot {
1058 text,
1059 syntax,
1060 file: None,
1061 diagnostics: Default::default(),
1062 remote_selections: Default::default(),
1063 language,
1064 non_text_state_update_count: 0,
1065 }
1066 }
1067 }
1068
1069 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1070 let entity_id = cx.reserve_entity::<Self>().entity_id();
1071 let buffer_id = entity_id.as_non_zero_u64().into();
1072 let text = TextBuffer::new_normalized(
1073 ReplicaId::LOCAL,
1074 buffer_id,
1075 Default::default(),
1076 Rope::new(),
1077 )
1078 .snapshot();
1079 let syntax = SyntaxMap::new(&text).snapshot();
1080 BufferSnapshot {
1081 text,
1082 syntax,
1083 file: None,
1084 diagnostics: Default::default(),
1085 remote_selections: Default::default(),
1086 language: None,
1087 non_text_state_update_count: 0,
1088 }
1089 }
1090
1091 #[cfg(any(test, feature = "test-support"))]
1092 pub fn build_snapshot_sync(
1093 text: Rope,
1094 language: Option<Arc<Language>>,
1095 language_registry: Option<Arc<LanguageRegistry>>,
1096 cx: &mut App,
1097 ) -> BufferSnapshot {
1098 let entity_id = cx.reserve_entity::<Self>().entity_id();
1099 let buffer_id = entity_id.as_non_zero_u64().into();
1100 let text =
1101 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1102 .snapshot();
1103 let mut syntax = SyntaxMap::new(&text).snapshot();
1104 if let Some(language) = language.clone() {
1105 syntax.reparse(&text, language_registry, language);
1106 }
1107 BufferSnapshot {
1108 text,
1109 syntax,
1110 file: None,
1111 diagnostics: Default::default(),
1112 remote_selections: Default::default(),
1113 language,
1114 non_text_state_update_count: 0,
1115 }
1116 }
1117
1118 /// Retrieve a snapshot of the buffer's current state. This is computationally
1119 /// cheap, and allows reading from the buffer on a background thread.
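    ///
    /// A sketch of reading from a snapshot off the main thread, using the
    /// `background_spawn` pattern seen elsewhere in this file (not compiled as a doctest):
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let last_row = snapshot.max_point().row;
    ///     // ... read-only work with `snapshot` ...
    /// })
    /// .detach();
    /// ```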
1120 pub fn snapshot(&self) -> BufferSnapshot {
1121 let text = self.text.snapshot();
1122 let mut syntax_map = self.syntax_map.lock();
1123 syntax_map.interpolate(&text);
1124 let syntax = syntax_map.snapshot();
1125
1126 BufferSnapshot {
1127 text,
1128 syntax,
1129 file: self.file.clone(),
1130 remote_selections: self.remote_selections.clone(),
1131 diagnostics: self.diagnostics.clone(),
1132 language: self.language.clone(),
1133 non_text_state_update_count: self.non_text_state_update_count,
1134 }
1135 }
1136
1137 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1138 let this = cx.entity();
1139 cx.new(|cx| {
1140 let mut branch = Self {
1141 branch_state: Some(BufferBranchState {
1142 base_buffer: this.clone(),
1143 merged_operations: Default::default(),
1144 }),
1145 language: self.language.clone(),
1146 has_conflict: self.has_conflict,
1147 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1148 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1149 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1150 };
1151 if let Some(language_registry) = self.language_registry() {
1152 branch.set_language_registry(language_registry);
1153 }
1154
1155 // Reparse the branch buffer so that we get syntax highlighting immediately.
1156 branch.reparse(cx);
1157
1158 branch
1159 })
1160 }
1161
1162 pub fn preview_edits(
1163 &self,
1164 edits: Arc<[(Range<Anchor>, String)]>,
1165 cx: &App,
1166 ) -> Task<EditPreview> {
1167 let registry = self.language_registry();
1168 let language = self.language().cloned();
1169 let old_snapshot = self.text.snapshot();
1170 let mut branch_buffer = self.text.branch();
1171 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1172 let executor = cx.background_executor().clone();
1173 cx.background_spawn(async move {
1174 if !edits.is_empty() {
1175 if let Some(language) = language.clone() {
1176 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1177 }
1178
1179 branch_buffer.edit(edits.iter().cloned(), &executor);
1180 let snapshot = branch_buffer.snapshot();
1181 syntax_snapshot.interpolate(&snapshot);
1182
1183 if let Some(language) = language {
1184 syntax_snapshot.reparse(&snapshot, registry, language);
1185 }
1186 }
1187 EditPreview {
1188 old_snapshot,
1189 applied_edits_snapshot: branch_buffer.snapshot(),
1190 syntax_snapshot,
1191 }
1192 })
1193 }
1194
1195 /// Applies all of the changes in this buffer that intersect any of the
1196 /// given `ranges` to its base buffer.
1197 ///
1198 /// If `ranges` is empty, then all changes will be applied. This buffer must
1199 /// be a branch buffer to call this method.
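    ///
    /// Usage sketch (assumes `branch` was created via [`Buffer::branch`]; not
    /// compiled as a doctest):
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty `ranges` vec applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```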
1200 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1201 let Some(base_buffer) = self.base_buffer() else {
1202 debug_panic!("not a branch buffer");
1203 return;
1204 };
1205
1206 let mut ranges = if ranges.is_empty() {
1207 &[0..usize::MAX]
1208 } else {
1209 ranges.as_slice()
1210 }
1211 .iter()
1212 .peekable();
1213
1214 let mut edits = Vec::new();
1215 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1216 let mut is_included = false;
1217 while let Some(range) = ranges.peek() {
1218 if range.end < edit.new.start {
1219 ranges.next().unwrap();
1220 } else {
1221 if range.start <= edit.new.end {
1222 is_included = true;
1223 }
1224 break;
1225 }
1226 }
1227
1228 if is_included {
1229 edits.push((
1230 edit.old.clone(),
1231 self.text_for_range(edit.new.clone()).collect::<String>(),
1232 ));
1233 }
1234 }
1235
1236 let operation = base_buffer.update(cx, |base_buffer, cx| {
1237 // cx.emit(BufferEvent::DiffBaseChanged);
1238 base_buffer.edit(edits, None, cx)
1239 });
1240
1241 if let Some(operation) = operation
1242 && let Some(BufferBranchState {
1243 merged_operations, ..
1244 }) = &mut self.branch_state
1245 {
1246 merged_operations.push(operation);
1247 }
1248 }
1249
1250 fn on_base_buffer_event(
1251 &mut self,
1252 _: Entity<Buffer>,
1253 event: &BufferEvent,
1254 cx: &mut Context<Self>,
1255 ) {
1256 let BufferEvent::Operation { operation, .. } = event else {
1257 return;
1258 };
1259 let Some(BufferBranchState {
1260 merged_operations, ..
1261 }) = &mut self.branch_state
1262 else {
1263 return;
1264 };
1265
1266 let mut operation_to_undo = None;
1267 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1268 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1269 {
1270 merged_operations.remove(ix);
1271 operation_to_undo = Some(operation.timestamp);
1272 }
1273
1274 self.apply_ops([operation.clone()], cx);
1275
1276 if let Some(timestamp) = operation_to_undo {
1277 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1278 self.undo_operations(counts, cx);
1279 }
1280 }
1281
1282 #[cfg(test)]
1283 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1284 &self.text
1285 }
1286
1287 /// Retrieve a snapshot of the buffer's raw text, without any
1288 /// language-related state like the syntax tree or diagnostics.
1289 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1290 self.text.snapshot()
1291 }
1292
1293 /// The file associated with the buffer, if any.
1294 pub fn file(&self) -> Option<&Arc<dyn File>> {
1295 self.file.as_ref()
1296 }
1297
1298 /// The version of the buffer that was last saved or reloaded from disk.
1299 pub fn saved_version(&self) -> &clock::Global {
1300 &self.saved_version
1301 }
1302
1303 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1304 pub fn saved_mtime(&self) -> Option<MTime> {
1305 self.saved_mtime
1306 }
1307
1308 /// Assign a language to the buffer.
1309 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1310 self.non_text_state_update_count += 1;
1311 self.syntax_map.lock().clear(&self.text);
1312 self.language = language;
1313 self.was_changed();
1314 self.reparse(cx);
1315 cx.emit(BufferEvent::LanguageChanged);
1316 }
1317
1318 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1319 /// other languages if parts of the buffer are written in different languages.
1320 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1321 self.syntax_map
1322 .lock()
1323 .set_language_registry(language_registry);
1324 }
1325
1326 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1327 self.syntax_map.lock().language_registry()
1328 }
1329
1330 /// Assign the line ending type to the buffer.
1331 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1332 self.text.set_line_ending(line_ending);
1333
1334 let lamport_timestamp = self.text.lamport_clock.tick();
1335 self.send_operation(
1336 Operation::UpdateLineEnding {
1337 line_ending,
1338 lamport_timestamp,
1339 },
1340 true,
1341 cx,
1342 );
1343 }
1344
1345 /// Assign the buffer a new [`Capability`].
1346 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1347 if self.capability != capability {
1348 self.capability = capability;
1349 cx.emit(BufferEvent::CapabilityChanged)
1350 }
1351 }
1352
1353 /// This method is called to signal that the buffer has been saved.
1354 pub fn did_save(
1355 &mut self,
1356 version: clock::Global,
1357 mtime: Option<MTime>,
1358 cx: &mut Context<Self>,
1359 ) {
1360 self.saved_version = version.clone();
1361 self.has_unsaved_edits.set((version, false));
1362 self.has_conflict = false;
1363 self.saved_mtime = mtime;
1364 self.was_changed();
1365 cx.emit(BufferEvent::Saved);
1366 cx.notify();
1367 }
1368
1369 /// Reloads the contents of the buffer from disk.
1370 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1371 let (tx, rx) = futures::channel::oneshot::channel();
1372 let encoding = EncodingWrapper::new(*(self.encoding.lock().unwrap()));
1373 let buffer_encoding = self.encoding.clone();
1374
1375 let prev_version = self.text.version();
1376 self.reload_task = Some(cx.spawn(async move |this, cx| {
1377 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1378 let file = this.file.as_ref()?.as_local()?;
1379 Some((file.disk_state().mtime(), {
1380 file.load(cx, encoding, false, true, Some(buffer_encoding))
1381 }))
1382 })?
1383 else {
1384 return Ok(());
1385 };
1386
1387 let new_text = new_text.await?;
1388 let diff = this
1389 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1390 .await;
1391 this.update(cx, |this, cx| {
1392 if this.version() == diff.base_version {
1393 this.finalize_last_transaction();
1394 this.apply_diff(diff, cx);
1395 tx.send(this.finalize_last_transaction().cloned()).ok();
1396 this.has_conflict = false;
1397 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1398 } else {
1399 if !diff.edits.is_empty()
1400 || this
1401 .edits_since::<usize>(&diff.base_version)
1402 .next()
1403 .is_some()
1404 {
1405 this.has_conflict = true;
1406 }
1407
1408 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1409 }
1410
1411 this.reload_task.take();
1412 })
1413 }));
1414 rx
1415 }
1416
1417 /// This method is called to signal that the buffer has been reloaded.
1418 pub fn did_reload(
1419 &mut self,
1420 version: clock::Global,
1421 line_ending: LineEnding,
1422 mtime: Option<MTime>,
1423 cx: &mut Context<Self>,
1424 ) {
1425 self.saved_version = version;
1426 self.has_unsaved_edits
1427 .set((self.saved_version.clone(), false));
1428 self.text.set_line_ending(line_ending);
1429 self.saved_mtime = mtime;
1430 cx.emit(BufferEvent::Reloaded);
1431 cx.notify();
1432 }
1433
1434 pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
1435 self.file = Some(new_file);
    }

    /// Updates the [`File`] backing this buffer. This should be called when
1438 /// the file has changed or has been deleted.
1439 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1440 let was_dirty = self.is_dirty();
1441 let mut file_changed = false;
1442
1443 if let Some(old_file) = self.file.as_ref() {
1444 if new_file.path() != old_file.path() {
1445 file_changed = true;
1446 }
1447
1448 let old_state = old_file.disk_state();
1449 let new_state = new_file.disk_state();
1450 if old_state != new_state {
1451 file_changed = true;
1452 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1453 cx.emit(BufferEvent::ReloadNeeded)
1454 }
1455 }
1456 } else {
1457 file_changed = true;
1458 };
1459
1460 self.file = Some(new_file);
1461 if file_changed {
1462 self.was_changed();
1463 self.non_text_state_update_count += 1;
1464 if was_dirty != self.is_dirty() {
1465 cx.emit(BufferEvent::DirtyChanged);
1466 }
1467 cx.emit(BufferEvent::FileHandleChanged);
1468 cx.notify();
1469 }
1470 }
1471
1472 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1473 Some(self.branch_state.as_ref()?.base_buffer.clone())
1474 }
1475
1476 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1477 pub fn language(&self) -> Option<&Arc<Language>> {
1478 self.language.as_ref()
1479 }
1480
1481 /// Returns the [`Language`] at the given location.
1482 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1483 let offset = position.to_offset(self);
1484 let mut is_first = true;
1485 let start_anchor = self.anchor_before(offset);
1486 let end_anchor = self.anchor_after(offset);
1487 self.syntax_map
1488 .lock()
1489 .layers_for_range(offset..offset, &self.text, false)
1490 .filter(|layer| {
1491 if is_first {
1492 is_first = false;
1493 return true;
1494 }
1495
1496 layer
1497 .included_sub_ranges
1498 .map(|sub_ranges| {
1499 sub_ranges.iter().any(|sub_range| {
1500 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1501 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1502 !is_before_start && !is_after_end
1503 })
1504 })
1505 .unwrap_or(true)
1506 })
1507 .last()
1508 .map(|info| info.language.clone())
1509 .or_else(|| self.language.clone())
1510 }
1511
1512 /// Returns each [`Language`] for the active syntax layers at the given location.
1513 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1514 let offset = position.to_offset(self);
1515 let mut languages: Vec<Arc<Language>> = self
1516 .syntax_map
1517 .lock()
1518 .layers_for_range(offset..offset, &self.text, false)
1519 .map(|info| info.language.clone())
1520 .collect();
1521
1522 if languages.is_empty()
1523 && let Some(buffer_language) = self.language()
1524 {
1525 languages.push(buffer_language.clone());
1526 }
1527
1528 languages
1529 }
1530
1531 /// An integer version number that accounts for all updates besides
1532 /// the buffer's text itself (which is versioned via a version vector).
1533 pub fn non_text_state_update_count(&self) -> usize {
1534 self.non_text_state_update_count
1535 }
1536
1537 /// Whether the buffer is being parsed in the background.
1538 #[cfg(any(test, feature = "test-support"))]
1539 pub fn is_parsing(&self) -> bool {
1540 self.reparse.is_some()
1541 }
1542
1543 /// Indicates whether the buffer contains any regions that may be
1544 /// written in a language that hasn't been loaded yet.
1545 pub fn contains_unknown_injections(&self) -> bool {
1546 self.syntax_map.lock().contains_unknown_injections()
1547 }
1548
1549 #[cfg(any(test, feature = "test-support"))]
1550 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1551 self.sync_parse_timeout = timeout;
1552 }
1553
1554 /// Called after an edit to synchronize the buffer's main parse tree with
1555 /// the buffer's new underlying state.
1556 ///
1557 /// Locks the syntax map and interpolates the edits since the last reparse
1558 /// into the foreground syntax tree.
1559 ///
1560 /// Then takes a stable snapshot of the syntax map before unlocking it.
1561 /// The snapshot with the interpolated edits is sent to a background thread,
1562 /// where we ask Tree-sitter to perform an incremental parse.
1563 ///
1564 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1565 /// waiting on the parse to complete. As soon as it completes, we proceed
1566 /// synchronously, unless a 1ms timeout elapses.
1567 ///
1568 /// If we time out waiting on the parse, we spawn a second task waiting
1569 /// until the parse does complete and return with the interpolated tree still
1570 /// in the foreground. When the background parse completes, call back into
1571 /// the main thread and assign the foreground parse state.
1572 ///
1573 /// If the buffer or grammar changed since the start of the background parse,
1574 /// initiate an additional reparse recursively. To avoid concurrent parses
1575 /// for the same buffer, we only initiate a new parse if we are not already
1576 /// parsing in the background.
1577 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1578 if self.reparse.is_some() {
1579 return;
1580 }
1581 let language = if let Some(language) = self.language.clone() {
1582 language
1583 } else {
1584 return;
1585 };
1586
1587 let text = self.text_snapshot();
1588 let parsed_version = self.version();
1589
1590 let mut syntax_map = self.syntax_map.lock();
1591 syntax_map.interpolate(&text);
1592 let language_registry = syntax_map.language_registry();
1593 let mut syntax_snapshot = syntax_map.snapshot();
1594 drop(syntax_map);
1595
1596 let parse_task = cx.background_spawn({
1597 let language = language.clone();
1598 let language_registry = language_registry.clone();
1599 async move {
1600 syntax_snapshot.reparse(&text, language_registry, language);
1601 syntax_snapshot
1602 }
1603 });
1604
1605 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1606 match cx
1607 .background_executor()
1608 .block_with_timeout(self.sync_parse_timeout, parse_task)
1609 {
1610 Ok(new_syntax_snapshot) => {
1611 self.did_finish_parsing(new_syntax_snapshot, cx);
1612 self.reparse = None;
1613 }
1614 Err(parse_task) => {
1615 // todo(lw): hot foreground spawn
1616 self.reparse = Some(cx.spawn(async move |this, cx| {
1617 let new_syntax_map = cx.background_spawn(parse_task).await;
1618 this.update(cx, move |this, cx| {
1619 let grammar_changed = || {
1620 this.language.as_ref().is_none_or(|current_language| {
1621 !Arc::ptr_eq(&language, current_language)
1622 })
1623 };
1624 let language_registry_changed = || {
1625 new_syntax_map.contains_unknown_injections()
1626 && language_registry.is_some_and(|registry| {
1627 registry.version() != new_syntax_map.language_registry_version()
1628 })
1629 };
1630 let parse_again = this.version.changed_since(&parsed_version)
1631 || language_registry_changed()
1632 || grammar_changed();
1633 this.did_finish_parsing(new_syntax_map, cx);
1634 this.reparse = None;
1635 if parse_again {
1636 this.reparse(cx);
1637 }
1638 })
1639 .ok();
1640 }));
1641 }
1642 }
1643 }
1644
1645 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1646 self.was_changed();
1647 self.non_text_state_update_count += 1;
1648 self.syntax_map.lock().did_parse(syntax_snapshot);
1649 self.request_autoindent(cx);
1650 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1651 cx.emit(BufferEvent::Reparsed);
1652 cx.notify();
1653 }
1654
1655 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1656 self.parse_status.1.clone()
1657 }
1658
1659 /// Assign to the buffer a set of diagnostics created by a given language server.
1660 pub fn update_diagnostics(
1661 &mut self,
1662 server_id: LanguageServerId,
1663 diagnostics: DiagnosticSet,
1664 cx: &mut Context<Self>,
1665 ) {
1666 let lamport_timestamp = self.text.lamport_clock.tick();
1667 let op = Operation::UpdateDiagnostics {
1668 server_id,
1669 diagnostics: diagnostics.iter().cloned().collect(),
1670 lamport_timestamp,
1671 };
1672
1673 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1674 self.send_operation(op, true, cx);
1675 }
1676
1677 pub fn buffer_diagnostics(
1678 &self,
1679 for_server: Option<LanguageServerId>,
1680 ) -> Vec<&DiagnosticEntry<Anchor>> {
1681 match for_server {
1682 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1683 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1684 Err(_) => Vec::new(),
1685 },
1686 None => self
1687 .diagnostics
1688 .iter()
1689 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1690 .collect(),
1691 }
1692 }
1693
1694 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1695 if let Some(indent_sizes) = self.compute_autoindents() {
1696 let indent_sizes = cx.background_spawn(indent_sizes);
1697 match cx
1698 .background_executor()
1699 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1700 {
1701 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1702 Err(indent_sizes) => {
1703 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1704 let indent_sizes = indent_sizes.await;
1705 this.update(cx, |this, cx| {
1706 this.apply_autoindents(indent_sizes, cx);
1707 })
1708 .ok();
1709 }));
1710 }
1711 }
1712 } else {
1713 self.autoindent_requests.clear();
1714 for tx in self.wait_for_autoindent_txs.drain(..) {
1715 tx.send(()).ok();
1716 }
1717 }
1718 }
1719
1720 fn compute_autoindents(
1721 &self,
1722 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1723 let max_rows_between_yields = 100;
1724 let snapshot = self.snapshot();
1725 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1726 return None;
1727 }
1728
1729 let autoindent_requests = self.autoindent_requests.clone();
1730 Some(async move {
1731 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1732 for request in autoindent_requests {
1733 // Resolve each edited range to its row in the current buffer and in the
1734 // buffer before this batch of edits.
1735 let mut row_ranges = Vec::new();
1736 let mut old_to_new_rows = BTreeMap::new();
1737 let mut language_indent_sizes_by_new_row = Vec::new();
1738 for entry in &request.entries {
1739 let position = entry.range.start;
1740 let new_row = position.to_point(&snapshot).row;
1741 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1742 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1743
1744 if !entry.first_line_is_new {
1745 let old_row = position.to_point(&request.before_edit).row;
1746 old_to_new_rows.insert(old_row, new_row);
1747 }
1748 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1749 }
1750
1751 // Build a map containing the suggested indentation for each of the edited lines
1752 // with respect to the state of the buffer before these edits. This map is keyed
1753 // by the rows for these lines in the current state of the buffer.
1754 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1755 let old_edited_ranges =
1756 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1757 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1758 let mut language_indent_size = IndentSize::default();
1759 for old_edited_range in old_edited_ranges {
1760 let suggestions = request
1761 .before_edit
1762 .suggest_autoindents(old_edited_range.clone())
1763 .into_iter()
1764 .flatten();
1765 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1766 if let Some(suggestion) = suggestion {
1767 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1768
1769 // Find the indent size based on the language for this row.
1770 while let Some((row, size)) = language_indent_sizes.peek() {
1771 if *row > new_row {
1772 break;
1773 }
1774 language_indent_size = *size;
1775 language_indent_sizes.next();
1776 }
1777
1778 let suggested_indent = old_to_new_rows
1779 .get(&suggestion.basis_row)
1780 .and_then(|from_row| {
1781 Some(old_suggestions.get(from_row).copied()?.0)
1782 })
1783 .unwrap_or_else(|| {
1784 request
1785 .before_edit
1786 .indent_size_for_line(suggestion.basis_row)
1787 })
1788 .with_delta(suggestion.delta, language_indent_size);
1789 old_suggestions
1790 .insert(new_row, (suggested_indent, suggestion.within_error));
1791 }
1792 }
1793 yield_now().await;
1794 }
1795
1796 // Compute new suggestions for each line, but only include them in the result
1797 // if they differ from the old suggestion for that line.
1798 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1799 let mut language_indent_size = IndentSize::default();
1800 for (row_range, original_indent_column) in row_ranges {
1801 let new_edited_row_range = if request.is_block_mode {
1802 row_range.start..row_range.start + 1
1803 } else {
1804 row_range.clone()
1805 };
1806
1807 let suggestions = snapshot
1808 .suggest_autoindents(new_edited_row_range.clone())
1809 .into_iter()
1810 .flatten();
1811 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1812 if let Some(suggestion) = suggestion {
1813 // Find the indent size based on the language for this row.
1814 while let Some((row, size)) = language_indent_sizes.peek() {
1815 if *row > new_row {
1816 break;
1817 }
1818 language_indent_size = *size;
1819 language_indent_sizes.next();
1820 }
1821
1822 let suggested_indent = indent_sizes
1823 .get(&suggestion.basis_row)
1824 .copied()
1825 .map(|e| e.0)
1826 .unwrap_or_else(|| {
1827 snapshot.indent_size_for_line(suggestion.basis_row)
1828 })
1829 .with_delta(suggestion.delta, language_indent_size);
1830
1831 if old_suggestions.get(&new_row).is_none_or(
1832 |(old_indentation, was_within_error)| {
1833 suggested_indent != *old_indentation
1834 && (!suggestion.within_error || *was_within_error)
1835 },
1836 ) {
1837 indent_sizes.insert(
1838 new_row,
1839 (suggested_indent, request.ignore_empty_lines),
1840 );
1841 }
1842 }
1843 }
1844
1845 if let (true, Some(original_indent_column)) =
1846 (request.is_block_mode, original_indent_column)
1847 {
1848 let new_indent =
1849 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1850 *indent
1851 } else {
1852 snapshot.indent_size_for_line(row_range.start)
1853 };
1854 let delta = new_indent.len as i64 - original_indent_column as i64;
1855 if delta != 0 {
1856 for row in row_range.skip(1) {
1857 indent_sizes.entry(row).or_insert_with(|| {
1858 let mut size = snapshot.indent_size_for_line(row);
1859 if size.kind == new_indent.kind {
1860 match delta.cmp(&0) {
1861 Ordering::Greater => size.len += delta as u32,
1862 Ordering::Less => {
1863 size.len = size.len.saturating_sub(-delta as u32)
1864 }
1865 Ordering::Equal => {}
1866 }
1867 }
1868 (size, request.ignore_empty_lines)
1869 });
1870 }
1871 }
1872 }
1873
1874 yield_now().await;
1875 }
1876 }
1877
1878 indent_sizes
1879 .into_iter()
1880 .filter_map(|(row, (indent, ignore_empty_lines))| {
1881 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1882 None
1883 } else {
1884 Some((row, indent))
1885 }
1886 })
1887 .collect()
1888 })
1889 }
1890
1891 fn apply_autoindents(
1892 &mut self,
1893 indent_sizes: BTreeMap<u32, IndentSize>,
1894 cx: &mut Context<Self>,
1895 ) {
1896 self.autoindent_requests.clear();
1897 for tx in self.wait_for_autoindent_txs.drain(..) {
1898 tx.send(()).ok();
1899 }
1900
1901 let edits: Vec<_> = indent_sizes
1902 .into_iter()
1903 .filter_map(|(row, indent_size)| {
1904 let current_size = indent_size_for_line(self, row);
1905 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1906 })
1907 .collect();
1908
1909 let preserve_preview = self.preserve_preview();
1910 self.edit(edits, None, cx);
1911 if preserve_preview {
1912 self.refresh_preview();
1913 }
1914 }
1915
1916 /// Create a minimal edit that will cause the given row to be indented
1917 /// with the given size. After applying this edit, the length of the line
1918 /// will always be at least `new_size.len`.
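    ///
    /// A hedged sketch of the expected result (marked `ignore`, so it is not
    /// compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a two-space indent to four spaces inserts two spaces at the
    /// // start of the row; nothing else on the line is touched.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```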
1919 pub fn edit_for_indent_size_adjustment(
1920 row: u32,
1921 current_size: IndentSize,
1922 new_size: IndentSize,
1923 ) -> Option<(Range<Point>, String)> {
1924 if new_size.kind == current_size.kind {
1925            match new_size.len.cmp(&current_size.len) {
1926 Ordering::Greater => {
1927 let point = Point::new(row, 0);
1928 Some((
1929 point..point,
1930 iter::repeat(new_size.char())
1931 .take((new_size.len - current_size.len) as usize)
1932 .collect::<String>(),
1933 ))
1934 }
1935
1936 Ordering::Less => Some((
1937 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1938 String::new(),
1939 )),
1940
1941 Ordering::Equal => None,
1942 }
1943 } else {
1944 Some((
1945 Point::new(row, 0)..Point::new(row, current_size.len),
1946 iter::repeat(new_size.char())
1947 .take(new_size.len as usize)
1948 .collect::<String>(),
1949 ))
1950 }
1951 }
1952
1953 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1954 /// and the given new text.
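    ///
    /// A hedged usage sketch (marked `ignore`, so it is not compiled as a doctest);
    /// the `buffer` entity handle and the async `cx` are assumed to come from the
    /// surrounding GPUI code:
    ///
    /// ```ignore
    /// // Compute the diff on the background executor, then apply it back to the
    /// // buffer; `apply_diff` rebases the hunks if the buffer changed meanwhile.
    /// let diff = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff.await;
    ///     buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx)).ok();
    /// })
    /// .detach();
    /// ```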
1955 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1956 let old_text = self.as_rope().clone();
1957 let base_version = self.version();
1958 cx.background_executor()
1959 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1960 let old_text = old_text.to_string();
1961 let line_ending = LineEnding::detect(&new_text);
1962 LineEnding::normalize(&mut new_text);
1963 let edits = text_diff(&old_text, &new_text);
1964 Diff {
1965 base_version,
1966 line_ending,
1967 edits,
1968 }
1969 })
1970 }
1971
1972 /// Spawns a background task that searches the buffer for any whitespace
1973    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1974 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1975 let old_text = self.as_rope().clone();
1976 let line_ending = self.line_ending();
1977 let base_version = self.version();
1978 cx.background_spawn(async move {
1979 let ranges = trailing_whitespace_ranges(&old_text);
1980 let empty = Arc::<str>::from("");
1981 Diff {
1982 base_version,
1983 line_ending,
1984 edits: ranges
1985 .into_iter()
1986 .map(|range| (range, empty.clone()))
1987 .collect(),
1988 }
1989 })
1990 }
1991
1992 /// Ensures that the buffer ends with a single newline character, and
1993    /// no other whitespace. Does nothing if the buffer is empty.
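    ///
    /// A hedged before/after sketch (marked `ignore`, so it is not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// // "fn main() {}\t \n\n"  becomes  "fn main() {}\n"
    /// // ""                     stays     ""
    /// buffer.ensure_final_newline(cx);
    /// ```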
1994 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1995 let len = self.len();
1996 if len == 0 {
1997 return;
1998 }
1999 let mut offset = len;
2000 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2001 let non_whitespace_len = chunk
2002 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2003 .len();
2004 offset -= chunk.len();
2005 offset += non_whitespace_len;
2006 if non_whitespace_len != 0 {
2007 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2008 return;
2009 }
2010 break;
2011 }
2012 }
2013 self.edit([(offset..len, "\n")], None, cx);
2014 }
2015
2016 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2017    /// calculated, the diff is adjusted to account for those changes, and any parts
2018    /// of the diff that conflict with those changes are discarded.
2019 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2020 let snapshot = self.snapshot();
2021 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2022 let mut delta = 0;
2023 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2024 while let Some(edit_since) = edits_since.peek() {
2025 // If the edit occurs after a diff hunk, then it does not
2026 // affect that hunk.
2027 if edit_since.old.start > range.end {
2028 break;
2029 }
2030 // If the edit precedes the diff hunk, then adjust the hunk
2031 // to reflect the edit.
2032 else if edit_since.old.end < range.start {
2033 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2034 edits_since.next();
2035 }
2036 // If the edit intersects a diff hunk, then discard that hunk.
2037 else {
2038 return None;
2039 }
2040 }
2041
2042 let start = (range.start as i64 + delta) as usize;
2043 let end = (range.end as i64 + delta) as usize;
2044 Some((start..end, new_text))
2045 });
2046
2047 self.start_transaction();
2048 self.text.set_line_ending(diff.line_ending);
2049 self.edit(adjusted_edits, None, cx);
2050 self.end_transaction(cx)
2051 }
2052
2053 pub fn has_unsaved_edits(&self) -> bool {
2054 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2055
2056 if last_version == self.version {
2057 self.has_unsaved_edits
2058 .set((last_version, has_unsaved_edits));
2059 return has_unsaved_edits;
2060 }
2061
2062 let has_edits = self.has_edits_since(&self.saved_version);
2063 self.has_unsaved_edits
2064 .set((self.version.clone(), has_edits));
2065 has_edits
2066 }
2067
2068 /// Checks if the buffer has unsaved changes.
2069 pub fn is_dirty(&self) -> bool {
2070 if self.capability == Capability::ReadOnly {
2071 return false;
2072 }
2073 if self.has_conflict {
2074 return true;
2075 }
2076 match self.file.as_ref().map(|f| f.disk_state()) {
2077 Some(DiskState::New) | Some(DiskState::Deleted) => {
2078 !self.is_empty() && self.has_unsaved_edits()
2079 }
2080 _ => self.has_unsaved_edits(),
2081 }
2082 }
2083
2084 /// Checks if the buffer and its file have both changed since the buffer
2085 /// was last saved or reloaded.
2086 pub fn has_conflict(&self) -> bool {
2087 if self.has_conflict {
2088 return true;
2089 }
2090 let Some(file) = self.file.as_ref() else {
2091 return false;
2092 };
2093 match file.disk_state() {
2094 DiskState::New => false,
2095 DiskState::Present { mtime } => match self.saved_mtime {
2096 Some(saved_mtime) => {
2097 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2098 }
2099 None => true,
2100 },
2101 DiskState::Deleted => false,
2102 }
2103 }
2104
2105 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2106 pub fn subscribe(&mut self) -> Subscription {
2107 self.text.subscribe()
2108 }
2109
2110 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2111 ///
2112 /// This allows downstream code to check if the buffer's text has changed without
2113    /// waiting for an effect cycle, which would be required if using events.
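    ///
    /// A hedged sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// // The buffer only keeps a weak reference, so the flag is unregistered
    /// // automatically once the caller drops the `Rc`.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get());
    /// ```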
2114 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2115 if let Err(ix) = self
2116 .change_bits
2117 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2118 {
2119 self.change_bits.insert(ix, bit);
2120 }
2121 }
2122
2123 /// Set the change bit for all "listeners".
2124 fn was_changed(&mut self) {
2125 self.change_bits.retain(|change_bit| {
2126 change_bit
2127 .upgrade()
2128 .inspect(|bit| {
2129 _ = bit.replace(true);
2130 })
2131 .is_some()
2132 });
2133 }
2134
2135 /// Starts a transaction, if one is not already in-progress. When undoing or
2136 /// redoing edits, all of the edits performed within a transaction are undone
2137 /// or redone together.
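    ///
    /// A hedged sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// // Both edits land in one transaction, so a single undo reverts them together.
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(0..0, "#![allow(dead_code)]\n")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// buffer.undo(cx);
    /// ```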
2138 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2139 self.start_transaction_at(Instant::now())
2140 }
2141
2142 /// Starts a transaction, providing the current time. Subsequent transactions
2143 /// that occur within a short period of time will be grouped together. This
2144 /// is controlled by the buffer's undo grouping duration.
2145 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2146 self.transaction_depth += 1;
2147 if self.was_dirty_before_starting_transaction.is_none() {
2148 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2149 }
2150 self.text.start_transaction_at(now)
2151 }
2152
2153 /// Terminates the current transaction, if this is the outermost transaction.
2154 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2155 self.end_transaction_at(Instant::now(), cx)
2156 }
2157
2158 /// Terminates the current transaction, providing the current time. Subsequent transactions
2159 /// that occur within a short period of time will be grouped together. This
2160 /// is controlled by the buffer's undo grouping duration.
2161 pub fn end_transaction_at(
2162 &mut self,
2163 now: Instant,
2164 cx: &mut Context<Self>,
2165 ) -> Option<TransactionId> {
2166 assert!(self.transaction_depth > 0);
2167 self.transaction_depth -= 1;
2168 let was_dirty = if self.transaction_depth == 0 {
2169 self.was_dirty_before_starting_transaction.take().unwrap()
2170 } else {
2171 false
2172 };
2173 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2174 self.did_edit(&start_version, was_dirty, cx);
2175 Some(transaction_id)
2176 } else {
2177 None
2178 }
2179 }
2180
2181 /// Manually add a transaction to the buffer's undo history.
2182 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2183 self.text.push_transaction(transaction, now);
2184 }
2185
2186 /// Differs from `push_transaction` in that it does not clear the redo
2187 /// stack. Intended to be used to create a parent transaction to merge
2188 /// potential child transactions into.
2189 ///
2190 /// The caller is responsible for removing it from the undo history using
2191 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2192 /// are merged into this transaction, the caller is responsible for ensuring
2193 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2194 /// cleared is to create transactions with the usual `start_transaction` and
2195 /// `end_transaction` methods and merging the resulting transactions into
2196    /// the transaction created by this method.
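    ///
    /// A hedged sketch of that flow (marked `ignore`, so it is not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// // Create a parent transaction, merge the next edit's transaction into it,
    /// // and forget the parent again if no edit was actually made.
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```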
2197 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2198 self.text.push_empty_transaction(now)
2199 }
2200
2201 /// Prevent the last transaction from being grouped with any subsequent transactions,
2202    /// even if they occur within the buffer's undo grouping duration.
2203 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2204 self.text.finalize_last_transaction()
2205 }
2206
2207 /// Manually group all changes since a given transaction.
2208 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2209 self.text.group_until_transaction(transaction_id);
2210 }
2211
2212    /// Manually remove a transaction from the buffer's undo history.
2213 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2214 self.text.forget_transaction(transaction_id)
2215 }
2216
2217    /// Retrieve a transaction from the buffer's undo history.
2218 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2219 self.text.get_transaction(transaction_id)
2220 }
2221
2222 /// Manually merge two transactions in the buffer's undo history.
2223 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2224 self.text.merge_transactions(transaction, destination);
2225 }
2226
2227 /// Waits for the buffer to receive operations with the given timestamps.
2228 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2229 &mut self,
2230 edit_ids: It,
2231 ) -> impl Future<Output = Result<()>> + use<It> {
2232 self.text.wait_for_edits(edit_ids)
2233 }
2234
2235 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2236 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2237 &mut self,
2238 anchors: It,
2239 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2240 self.text.wait_for_anchors(anchors)
2241 }
2242
2243 /// Waits for the buffer to receive operations up to the given version.
2244 pub fn wait_for_version(
2245 &mut self,
2246 version: clock::Global,
2247 ) -> impl Future<Output = Result<()>> + use<> {
2248 self.text.wait_for_version(version)
2249 }
2250
2251 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2252    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2253 pub fn give_up_waiting(&mut self) {
2254 self.text.give_up_waiting();
2255 }
2256
2257 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2258 let mut rx = None;
2259 if !self.autoindent_requests.is_empty() {
2260 let channel = oneshot::channel();
2261 self.wait_for_autoindent_txs.push(channel.0);
2262 rx = Some(channel.1);
2263 }
2264 rx
2265 }
2266
2267 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2268 pub fn set_active_selections(
2269 &mut self,
2270 selections: Arc<[Selection<Anchor>]>,
2271 line_mode: bool,
2272 cursor_shape: CursorShape,
2273 cx: &mut Context<Self>,
2274 ) {
2275 let lamport_timestamp = self.text.lamport_clock.tick();
2276 self.remote_selections.insert(
2277 self.text.replica_id(),
2278 SelectionSet {
2279 selections: selections.clone(),
2280 lamport_timestamp,
2281 line_mode,
2282 cursor_shape,
2283 },
2284 );
2285 self.send_operation(
2286 Operation::UpdateSelections {
2287 selections,
2288 line_mode,
2289 lamport_timestamp,
2290 cursor_shape,
2291 },
2292 true,
2293 cx,
2294 );
2295 self.non_text_state_update_count += 1;
2296 cx.notify();
2297 }
2298
2299 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2300 /// this replica.
2301 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2302 if self
2303 .remote_selections
2304 .get(&self.text.replica_id())
2305 .is_none_or(|set| !set.selections.is_empty())
2306 {
2307 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2308 }
2309 }
2310
2311 pub fn set_agent_selections(
2312 &mut self,
2313 selections: Arc<[Selection<Anchor>]>,
2314 line_mode: bool,
2315 cursor_shape: CursorShape,
2316 cx: &mut Context<Self>,
2317 ) {
2318 let lamport_timestamp = self.text.lamport_clock.tick();
2319 self.remote_selections.insert(
2320 ReplicaId::AGENT,
2321 SelectionSet {
2322 selections,
2323 lamport_timestamp,
2324 line_mode,
2325 cursor_shape,
2326 },
2327 );
2328 self.non_text_state_update_count += 1;
2329 cx.notify();
2330 }
2331
2332 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2333 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2334 }
2335
2336 /// Replaces the buffer's entire text.
2337 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2338 where
2339 T: Into<Arc<str>>,
2340 {
2341 self.autoindent_requests.clear();
2342 self.edit([(0..self.len(), text)], None, cx)
2343 }
2344
2345 /// Appends the given text to the end of the buffer.
2346 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2347 where
2348 T: Into<Arc<str>>,
2349 {
2350 self.edit([(self.len()..self.len(), text)], None, cx)
2351 }
2352
2353 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2354 /// delete, and a string of text to insert at that location.
2355 ///
2356 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2357 /// request for the edited ranges, which will be processed when the buffer finishes
2358 /// parsing.
2359 ///
2360 /// Parsing takes place at the end of a transaction, and may compute synchronously
2361 /// or asynchronously, depending on the changes.
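    ///
    /// A hedged sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a newline, auto-indenting each
    /// // edited line according to the language at that position.
    /// buffer.edit(
    ///     [(0..3, "let"), (buffer.len()..buffer.len(), "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```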
2362 pub fn edit<I, S, T>(
2363 &mut self,
2364 edits_iter: I,
2365 autoindent_mode: Option<AutoindentMode>,
2366 cx: &mut Context<Self>,
2367 ) -> Option<clock::Lamport>
2368 where
2369 I: IntoIterator<Item = (Range<S>, T)>,
2370 S: ToOffset,
2371 T: Into<Arc<str>>,
2372 {
2373 // Skip invalid edits and coalesce contiguous ones.
2374 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2375
2376 for (range, new_text) in edits_iter {
2377 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2378
2379 if range.start > range.end {
2380 mem::swap(&mut range.start, &mut range.end);
2381 }
2382 let new_text = new_text.into();
2383 if !new_text.is_empty() || !range.is_empty() {
2384 if let Some((prev_range, prev_text)) = edits.last_mut()
2385 && prev_range.end >= range.start
2386 {
2387 prev_range.end = cmp::max(prev_range.end, range.end);
2388 *prev_text = format!("{prev_text}{new_text}").into();
2389 } else {
2390 edits.push((range, new_text));
2391 }
2392 }
2393 }
2394 if edits.is_empty() {
2395 return None;
2396 }
2397
2398 self.start_transaction();
2399 self.pending_autoindent.take();
2400 let autoindent_request = autoindent_mode
2401 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2402
2403 let edit_operation = self
2404 .text
2405 .edit(edits.iter().cloned(), cx.background_executor());
2406 let edit_id = edit_operation.timestamp();
2407
2408 if let Some((before_edit, mode)) = autoindent_request {
2409 let mut delta = 0isize;
2410 let mut previous_setting = None;
2411 let entries: Vec<_> = edits
2412 .into_iter()
2413 .enumerate()
2414 .zip(&edit_operation.as_edit().unwrap().new_text)
2415 .filter(|((_, (range, _)), _)| {
2416 let language = before_edit.language_at(range.start);
2417 let language_id = language.map(|l| l.id());
2418 if let Some((cached_language_id, auto_indent)) = previous_setting
2419 && cached_language_id == language_id
2420 {
2421 auto_indent
2422 } else {
2423 // The auto-indent setting is not present in editorconfigs, hence
2424 // we can avoid passing the file here.
2425 let auto_indent =
2426 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2427 previous_setting = Some((language_id, auto_indent));
2428 auto_indent
2429 }
2430 })
2431 .map(|((ix, (range, _)), new_text)| {
2432 let new_text_length = new_text.len();
2433 let old_start = range.start.to_point(&before_edit);
2434 let new_start = (delta + range.start as isize) as usize;
2435 let range_len = range.end - range.start;
2436 delta += new_text_length as isize - range_len as isize;
2437
2438 // Decide what range of the insertion to auto-indent, and whether
2439 // the first line of the insertion should be considered a newly-inserted line
2440 // or an edit to an existing line.
2441 let mut range_of_insertion_to_indent = 0..new_text_length;
2442 let mut first_line_is_new = true;
2443
2444 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2445 let old_line_end = before_edit.line_len(old_start.row);
2446
2447 if old_start.column > old_line_start {
2448 first_line_is_new = false;
2449 }
2450
2451 if !new_text.contains('\n')
2452 && (old_start.column + (range_len as u32) < old_line_end
2453 || old_line_end == old_line_start)
2454 {
2455 first_line_is_new = false;
2456 }
2457
2458 // When inserting text starting with a newline, avoid auto-indenting the
2459 // previous line.
2460 if new_text.starts_with('\n') {
2461 range_of_insertion_to_indent.start += 1;
2462 first_line_is_new = true;
2463 }
2464
2465 let mut original_indent_column = None;
2466 if let AutoindentMode::Block {
2467 original_indent_columns,
2468 } = &mode
2469 {
2470 original_indent_column = Some(if new_text.starts_with('\n') {
2471 indent_size_for_text(
2472 new_text[range_of_insertion_to_indent.clone()].chars(),
2473 )
2474 .len
2475 } else {
2476 original_indent_columns
2477 .get(ix)
2478 .copied()
2479 .flatten()
2480 .unwrap_or_else(|| {
2481 indent_size_for_text(
2482 new_text[range_of_insertion_to_indent.clone()].chars(),
2483 )
2484 .len
2485 })
2486 });
2487
2488 // Avoid auto-indenting the line after the edit.
2489 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2490 range_of_insertion_to_indent.end -= 1;
2491 }
2492 }
2493
2494 AutoindentRequestEntry {
2495 first_line_is_new,
2496 original_indent_column,
2497 indent_size: before_edit.language_indent_size_at(range.start, cx),
2498 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2499 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2500 }
2501 })
2502 .collect();
2503
2504 if !entries.is_empty() {
2505 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2506 before_edit,
2507 entries,
2508 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2509 ignore_empty_lines: false,
2510 }));
2511 }
2512 }
2513
2514 self.end_transaction(cx);
2515 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2516 Some(edit_id)
2517 }
2518
2519 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2520 self.was_changed();
2521
2522 if self.edits_since::<usize>(old_version).next().is_none() {
2523 return;
2524 }
2525
2526 self.reparse(cx);
2527 cx.emit(BufferEvent::Edited);
2528 if was_dirty != self.is_dirty() {
2529 cx.emit(BufferEvent::DirtyChanged);
2530 }
2531 cx.notify();
2532 }
2533
2534 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2535 where
2536 I: IntoIterator<Item = Range<T>>,
2537 T: ToOffset + Copy,
2538 {
2539 let before_edit = self.snapshot();
2540 let entries = ranges
2541 .into_iter()
2542 .map(|range| AutoindentRequestEntry {
2543 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2544 first_line_is_new: true,
2545 indent_size: before_edit.language_indent_size_at(range.start, cx),
2546 original_indent_column: None,
2547 })
2548 .collect();
2549 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2550 before_edit,
2551 entries,
2552 is_block_mode: false,
2553 ignore_empty_lines: true,
2554 }));
2555 self.request_autoindent(cx);
2556 }
2557
2558    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2559    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
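    ///
    /// A hedged sketch (marked `ignore`, so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// // Insert an empty, auto-indented line at the cursor, padding it with blank
    /// // lines above and below when the neighboring lines are not already blank.
    /// let start = buffer.insert_empty_line(Point::new(2, 4), true, true, cx);
    /// ```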
2560 pub fn insert_empty_line(
2561 &mut self,
2562 position: impl ToPoint,
2563 space_above: bool,
2564 space_below: bool,
2565 cx: &mut Context<Self>,
2566 ) -> Point {
2567 let mut position = position.to_point(self);
2568
2569 self.start_transaction();
2570
2571 self.edit(
2572 [(position..position, "\n")],
2573 Some(AutoindentMode::EachLine),
2574 cx,
2575 );
2576
2577 if position.column > 0 {
2578 position += Point::new(1, 0);
2579 }
2580
2581 if !self.is_line_blank(position.row) {
2582 self.edit(
2583 [(position..position, "\n")],
2584 Some(AutoindentMode::EachLine),
2585 cx,
2586 );
2587 }
2588
2589 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2590 self.edit(
2591 [(position..position, "\n")],
2592 Some(AutoindentMode::EachLine),
2593 cx,
2594 );
2595 position.row += 1;
2596 }
2597
2598 if space_below
2599 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2600 {
2601 self.edit(
2602 [(position..position, "\n")],
2603 Some(AutoindentMode::EachLine),
2604 cx,
2605 );
2606 }
2607
2608 self.end_transaction(cx);
2609
2610 position
2611 }
2612
2613 /// Applies the given remote operations to the buffer.
2614 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2615 self.pending_autoindent.take();
2616 let was_dirty = self.is_dirty();
2617 let old_version = self.version.clone();
2618 let mut deferred_ops = Vec::new();
2619 let buffer_ops = ops
2620 .into_iter()
2621 .filter_map(|op| match op {
2622 Operation::Buffer(op) => Some(op),
2623 _ => {
2624 if self.can_apply_op(&op) {
2625 self.apply_op(op, cx);
2626 } else {
2627 deferred_ops.push(op);
2628 }
2629 None
2630 }
2631 })
2632 .collect::<Vec<_>>();
2633 for operation in buffer_ops.iter() {
2634 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2635 }
2636 self.text
2637 .apply_ops(buffer_ops, Some(cx.background_executor()));
2638 self.deferred_ops.insert(deferred_ops);
2639 self.flush_deferred_ops(cx);
2640 self.did_edit(&old_version, was_dirty, cx);
2641 // Notify independently of whether the buffer was edited as the operations could include a
2642 // selection update.
2643 cx.notify();
2644 }
2645
2646 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2647 let mut deferred_ops = Vec::new();
2648 for op in self.deferred_ops.drain().iter().cloned() {
2649 if self.can_apply_op(&op) {
2650 self.apply_op(op, cx);
2651 } else {
2652 deferred_ops.push(op);
2653 }
2654 }
2655 self.deferred_ops.insert(deferred_ops);
2656 }
2657
2658 pub fn has_deferred_ops(&self) -> bool {
2659 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2660 }
2661
2662 fn can_apply_op(&self, operation: &Operation) -> bool {
2663 match operation {
2664 Operation::Buffer(_) => {
2665 unreachable!("buffer operations should never be applied at this layer")
2666 }
2667 Operation::UpdateDiagnostics {
2668 diagnostics: diagnostic_set,
2669 ..
2670 } => diagnostic_set.iter().all(|diagnostic| {
2671 self.text.can_resolve(&diagnostic.range.start)
2672 && self.text.can_resolve(&diagnostic.range.end)
2673 }),
2674 Operation::UpdateSelections { selections, .. } => selections
2675 .iter()
2676 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2677 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2678 }
2679 }
2680
2681 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2682 match operation {
2683 Operation::Buffer(_) => {
2684 unreachable!("buffer operations should never be applied at this layer")
2685 }
2686 Operation::UpdateDiagnostics {
2687 server_id,
2688 diagnostics: diagnostic_set,
2689 lamport_timestamp,
2690 } => {
2691 let snapshot = self.snapshot();
2692 self.apply_diagnostic_update(
2693 server_id,
2694 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2695 lamport_timestamp,
2696 cx,
2697 );
2698 }
2699 Operation::UpdateSelections {
2700 selections,
2701 lamport_timestamp,
2702 line_mode,
2703 cursor_shape,
2704 } => {
2705 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2706 && set.lamport_timestamp > lamport_timestamp
2707 {
2708 return;
2709 }
2710
2711 self.remote_selections.insert(
2712 lamport_timestamp.replica_id,
2713 SelectionSet {
2714 selections,
2715 lamport_timestamp,
2716 line_mode,
2717 cursor_shape,
2718 },
2719 );
2720 self.text.lamport_clock.observe(lamport_timestamp);
2721 self.non_text_state_update_count += 1;
2722 }
2723 Operation::UpdateCompletionTriggers {
2724 triggers,
2725 lamport_timestamp,
2726 server_id,
2727 } => {
2728 if triggers.is_empty() {
2729 self.completion_triggers_per_language_server
2730 .remove(&server_id);
2731 self.completion_triggers = self
2732 .completion_triggers_per_language_server
2733 .values()
2734 .flat_map(|triggers| triggers.iter().cloned())
2735 .collect();
2736 } else {
2737 self.completion_triggers_per_language_server
2738 .insert(server_id, triggers.iter().cloned().collect());
2739 self.completion_triggers.extend(triggers);
2740 }
2741 self.text.lamport_clock.observe(lamport_timestamp);
2742 }
2743 Operation::UpdateLineEnding {
2744 line_ending,
2745 lamport_timestamp,
2746 } => {
2747 self.text.set_line_ending(line_ending);
2748 self.text.lamport_clock.observe(lamport_timestamp);
2749 }
2750 }
2751 }
2752
2753 fn apply_diagnostic_update(
2754 &mut self,
2755 server_id: LanguageServerId,
2756 diagnostics: DiagnosticSet,
2757 lamport_timestamp: clock::Lamport,
2758 cx: &mut Context<Self>,
2759 ) {
2760 if lamport_timestamp > self.diagnostics_timestamp {
2761 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2762 if diagnostics.is_empty() {
2763 if let Ok(ix) = ix {
2764 self.diagnostics.remove(ix);
2765 }
2766 } else {
2767 match ix {
2768 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2769 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2770 };
2771 }
2772 self.diagnostics_timestamp = lamport_timestamp;
2773 self.non_text_state_update_count += 1;
2774 self.text.lamport_clock.observe(lamport_timestamp);
2775 cx.notify();
2776 cx.emit(BufferEvent::DiagnosticsUpdated);
2777 }
2778 }
2779
2780 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2781 self.was_changed();
2782 cx.emit(BufferEvent::Operation {
2783 operation,
2784 is_local,
2785 });
2786 }
2787
2788 /// Removes the selections for a given peer.
2789 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2790 self.remote_selections.remove(&replica_id);
2791 cx.notify();
2792 }
2793
2794 /// Undoes the most recent transaction.
2795 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2796 let was_dirty = self.is_dirty();
2797 let old_version = self.version.clone();
2798
2799 if let Some((transaction_id, operation)) = self.text.undo() {
2800 self.send_operation(Operation::Buffer(operation), true, cx);
2801 self.did_edit(&old_version, was_dirty, cx);
2802 Some(transaction_id)
2803 } else {
2804 None
2805 }
2806 }
2807
2808 /// Manually undoes a specific transaction in the buffer's undo history.
2809 pub fn undo_transaction(
2810 &mut self,
2811 transaction_id: TransactionId,
2812 cx: &mut Context<Self>,
2813 ) -> bool {
2814 let was_dirty = self.is_dirty();
2815 let old_version = self.version.clone();
2816 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2817 self.send_operation(Operation::Buffer(operation), true, cx);
2818 self.did_edit(&old_version, was_dirty, cx);
2819 true
2820 } else {
2821 false
2822 }
2823 }
2824
2825 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2826 pub fn undo_to_transaction(
2827 &mut self,
2828 transaction_id: TransactionId,
2829 cx: &mut Context<Self>,
2830 ) -> bool {
2831 let was_dirty = self.is_dirty();
2832 let old_version = self.version.clone();
2833
2834 let operations = self.text.undo_to_transaction(transaction_id);
2835 let undone = !operations.is_empty();
2836 for operation in operations {
2837 self.send_operation(Operation::Buffer(operation), true, cx);
2838 }
2839 if undone {
2840 self.did_edit(&old_version, was_dirty, cx)
2841 }
2842 undone
2843 }
2844
2845 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2846 let was_dirty = self.is_dirty();
2847 let operation = self.text.undo_operations(counts);
2848 let old_version = self.version.clone();
2849 self.send_operation(Operation::Buffer(operation), true, cx);
2850 self.did_edit(&old_version, was_dirty, cx);
2851 }
2852
2853    /// Redoes the most recently undone transaction.
2854 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2855 let was_dirty = self.is_dirty();
2856 let old_version = self.version.clone();
2857
2858 if let Some((transaction_id, operation)) = self.text.redo() {
2859 self.send_operation(Operation::Buffer(operation), true, cx);
2860 self.did_edit(&old_version, was_dirty, cx);
2861 Some(transaction_id)
2862 } else {
2863 None
2864 }
2865 }
2866
2867    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2868 pub fn redo_to_transaction(
2869 &mut self,
2870 transaction_id: TransactionId,
2871 cx: &mut Context<Self>,
2872 ) -> bool {
2873 let was_dirty = self.is_dirty();
2874 let old_version = self.version.clone();
2875
2876 let operations = self.text.redo_to_transaction(transaction_id);
2877 let redone = !operations.is_empty();
2878 for operation in operations {
2879 self.send_operation(Operation::Buffer(operation), true, cx);
2880 }
2881 if redone {
2882 self.did_edit(&old_version, was_dirty, cx)
2883 }
2884 redone
2885 }
2886
2887 /// Override current completion triggers with the user-provided completion triggers.
2888 pub fn set_completion_triggers(
2889 &mut self,
2890 server_id: LanguageServerId,
2891 triggers: BTreeSet<String>,
2892 cx: &mut Context<Self>,
2893 ) {
2894 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2895 if triggers.is_empty() {
2896 self.completion_triggers_per_language_server
2897 .remove(&server_id);
2898 self.completion_triggers = self
2899 .completion_triggers_per_language_server
2900 .values()
2901 .flat_map(|triggers| triggers.iter().cloned())
2902 .collect();
2903 } else {
2904 self.completion_triggers_per_language_server
2905 .insert(server_id, triggers.clone());
2906 self.completion_triggers.extend(triggers.iter().cloned());
2907 }
2908 self.send_operation(
2909 Operation::UpdateCompletionTriggers {
2910 triggers: triggers.into_iter().collect(),
2911 lamport_timestamp: self.completion_triggers_timestamp,
2912 server_id,
2913 },
2914 true,
2915 cx,
2916 );
2917 cx.notify();
2918 }
2919
2920 /// Returns a list of strings which trigger a completion menu for this language.
2921    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2922 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2923 &self.completion_triggers
2924 }
2925
2926 /// Call this directly after performing edits to prevent the preview tab
2927 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2928 /// to return false until there are additional edits.
2929 pub fn refresh_preview(&mut self) {
2930 self.preview_version = self.version.clone();
2931 }
2932
2933 /// Whether we should preserve the preview status of a tab containing this buffer.
2934 pub fn preserve_preview(&self) -> bool {
2935 !self.has_edits_since(&self.preview_version)
2936 }
2937
2938    /// Updates the buffer's `encoding` field whenever the encoding of its file changes.
2939 pub fn update_encoding(&mut self) {
2940 if let Some(file) = self.file() {
2941 if let Some(encoding) = file.encoding() {
2942 *self.encoding.lock().unwrap() = *encoding.lock().unwrap();
2943 } else {
2944 *self.encoding.lock().unwrap() = encoding_rs::UTF_8;
2945 };
2946 }
2947 }
2948}
2949
2950#[doc(hidden)]
2951#[cfg(any(test, feature = "test-support"))]
2952impl Buffer {
2953 pub fn edit_via_marked_text(
2954 &mut self,
2955 marked_string: &str,
2956 autoindent_mode: Option<AutoindentMode>,
2957 cx: &mut Context<Self>,
2958 ) {
2959 let edits = self.edits_for_marked_text(marked_string);
2960 self.edit(edits, autoindent_mode, cx);
2961 }
2962
2963 pub fn set_group_interval(&mut self, group_interval: Duration) {
2964 self.text.set_group_interval(group_interval);
2965 }
2966
2967 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2968 where
2969 T: rand::Rng,
2970 {
2971 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2972 let mut last_end = None;
2973 for _ in 0..old_range_count {
2974 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2975 break;
2976 }
2977
2978 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2979 let mut range = self.random_byte_range(new_start, rng);
2980 if rng.random_bool(0.2) {
2981 mem::swap(&mut range.start, &mut range.end);
2982 }
2983 last_end = Some(range.end);
2984
2985 let new_text_len = rng.random_range(0..10);
2986 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2987 new_text = new_text.to_uppercase();
2988
2989 edits.push((range, new_text));
2990 }
2991 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2992 self.edit(edits, None, cx);
2993 }
2994
2995 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2996 let was_dirty = self.is_dirty();
2997 let old_version = self.version.clone();
2998
2999 let ops = self.text.randomly_undo_redo(rng);
3000 if !ops.is_empty() {
3001 for op in ops {
3002 self.send_operation(Operation::Buffer(op), true, cx);
3003 self.did_edit(&old_version, was_dirty, cx);
3004 }
3005 }
3006 }
3007}
3008
3009impl EventEmitter<BufferEvent> for Buffer {}
3010
3011impl Deref for Buffer {
3012 type Target = TextBuffer;
3013
3014 fn deref(&self) -> &Self::Target {
3015 &self.text
3016 }
3017}
3018
3019impl BufferSnapshot {
3020 /// Returns [`IndentSize`] for a given line that respects user settings and
3021 /// language preferences.
3022 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3023 indent_size_for_line(self, row)
3024 }
3025
3026 /// Returns [`IndentSize`] for a given position that respects user settings
3027 /// and language preferences.
3028 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3029 let settings = language_settings(
3030 self.language_at(position).map(|l| l.name()),
3031 self.file(),
3032 cx,
3033 );
3034 if settings.hard_tabs {
3035 IndentSize::tab()
3036 } else {
3037 IndentSize::spaces(settings.tab_size.get())
3038 }
3039 }
3040
3041 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3042 /// is passed in as `single_indent_size`.
3043 pub fn suggested_indents(
3044 &self,
3045 rows: impl Iterator<Item = u32>,
3046 single_indent_size: IndentSize,
3047 ) -> BTreeMap<u32, IndentSize> {
3048 let mut result = BTreeMap::new();
3049
3050 for row_range in contiguous_ranges(rows, 10) {
3051 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3052 Some(suggestions) => suggestions,
3053 _ => break,
3054 };
3055
3056 for (row, suggestion) in row_range.zip(suggestions) {
3057 let indent_size = if let Some(suggestion) = suggestion {
3058 result
3059 .get(&suggestion.basis_row)
3060 .copied()
3061 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3062 .with_delta(suggestion.delta, single_indent_size)
3063 } else {
3064 self.indent_size_for_line(row)
3065 };
3066
3067 result.insert(row, indent_size);
3068 }
3069 }
3070
3071 result
3072 }
3073
3074 fn suggest_autoindents(
3075 &self,
3076 row_range: Range<u32>,
3077 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3078 let config = &self.language.as_ref()?.config;
3079 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3080
3081 #[derive(Debug, Clone)]
3082 struct StartPosition {
3083 start: Point,
3084 suffix: SharedString,
3085 }
3086
3087 // Find the suggested indentation ranges based on the syntax tree.
3088 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3089 let end = Point::new(row_range.end, 0);
3090 let range = (start..end).to_offset(&self.text);
3091 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3092 Some(&grammar.indents_config.as_ref()?.query)
3093 });
3094 let indent_configs = matches
3095 .grammars()
3096 .iter()
3097 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3098 .collect::<Vec<_>>();
3099
3100 let mut indent_ranges = Vec::<Range<Point>>::new();
3101 let mut start_positions = Vec::<StartPosition>::new();
3102 let mut outdent_positions = Vec::<Point>::new();
3103 while let Some(mat) = matches.peek() {
3104 let mut start: Option<Point> = None;
3105 let mut end: Option<Point> = None;
3106
3107 let config = indent_configs[mat.grammar_index];
3108 for capture in mat.captures {
3109 if capture.index == config.indent_capture_ix {
3110 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3111 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3112 } else if Some(capture.index) == config.start_capture_ix {
3113 start = Some(Point::from_ts_point(capture.node.end_position()));
3114 } else if Some(capture.index) == config.end_capture_ix {
3115 end = Some(Point::from_ts_point(capture.node.start_position()));
3116 } else if Some(capture.index) == config.outdent_capture_ix {
3117 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3118 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3119 start_positions.push(StartPosition {
3120 start: Point::from_ts_point(capture.node.start_position()),
3121 suffix: suffix.clone(),
3122 });
3123 }
3124 }
3125
3126 matches.advance();
3127 if let Some((start, end)) = start.zip(end) {
3128 if start.row == end.row {
3129 continue;
3130 }
3131 let range = start..end;
3132 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3133 Err(ix) => indent_ranges.insert(ix, range),
3134 Ok(ix) => {
3135 let prev_range = &mut indent_ranges[ix];
3136 prev_range.end = prev_range.end.max(range.end);
3137 }
3138 }
3139 }
3140 }
3141
3142 let mut error_ranges = Vec::<Range<Point>>::new();
3143 let mut matches = self
3144 .syntax
3145 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3146 while let Some(mat) = matches.peek() {
3147 let node = mat.captures[0].node;
3148 let start = Point::from_ts_point(node.start_position());
3149 let end = Point::from_ts_point(node.end_position());
3150 let range = start..end;
3151 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3152 Ok(ix) | Err(ix) => ix,
3153 };
3154 let mut end_ix = ix;
3155 while let Some(existing_range) = error_ranges.get(end_ix) {
3156 if existing_range.end < end {
3157 end_ix += 1;
3158 } else {
3159 break;
3160 }
3161 }
3162 error_ranges.splice(ix..end_ix, [range]);
3163 matches.advance();
3164 }
3165
3166 outdent_positions.sort();
3167 for outdent_position in outdent_positions {
3168            // Find the innermost indent range containing this outdent position
3169            // and set its end to the outdent position.
3170 if let Some(range_to_truncate) = indent_ranges
3171 .iter_mut()
3172 .filter(|indent_range| indent_range.contains(&outdent_position))
3173 .next_back()
3174 {
3175 range_to_truncate.end = outdent_position;
3176 }
3177 }
3178
3179 start_positions.sort_by_key(|b| b.start);
3180
3181        // Find the suggested indentation increases and decreases based on regexes.
3182 let mut regex_outdent_map = HashMap::default();
3183 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3184 let mut start_positions_iter = start_positions.iter().peekable();
3185
3186 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3187 self.for_each_line(
3188 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3189 ..Point::new(row_range.end, 0),
3190 |row, line| {
3191 if config
3192 .decrease_indent_pattern
3193 .as_ref()
3194 .is_some_and(|regex| regex.is_match(line))
3195 {
3196 indent_change_rows.push((row, Ordering::Less));
3197 }
3198 if config
3199 .increase_indent_pattern
3200 .as_ref()
3201 .is_some_and(|regex| regex.is_match(line))
3202 {
3203 indent_change_rows.push((row + 1, Ordering::Greater));
3204 }
3205 while let Some(pos) = start_positions_iter.peek() {
3206 if pos.start.row < row {
3207 let pos = start_positions_iter.next().unwrap();
3208 last_seen_suffix
3209 .entry(pos.suffix.to_string())
3210 .or_default()
3211 .push(pos.start);
3212 } else {
3213 break;
3214 }
3215 }
3216 for rule in &config.decrease_indent_patterns {
3217 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3218 let row_start_column = self.indent_size_for_line(row).len;
3219 let basis_row = rule
3220 .valid_after
3221 .iter()
3222 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3223 .flatten()
3224 .filter(|start_point| start_point.column <= row_start_column)
3225 .max_by_key(|start_point| start_point.row);
3226 if let Some(outdent_to_row) = basis_row {
3227 regex_outdent_map.insert(row, outdent_to_row.row);
3228 }
3229 break;
3230 }
3231 }
3232 },
3233 );
3234
3235 let mut indent_changes = indent_change_rows.into_iter().peekable();
3236 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3237 prev_non_blank_row.unwrap_or(0)
3238 } else {
3239 row_range.start.saturating_sub(1)
3240 };
3241
3242 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3243 Some(row_range.map(move |row| {
3244 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3245
3246 let mut indent_from_prev_row = false;
3247 let mut outdent_from_prev_row = false;
3248 let mut outdent_to_row = u32::MAX;
3249 let mut from_regex = false;
3250
3251 while let Some((indent_row, delta)) = indent_changes.peek() {
3252 match indent_row.cmp(&row) {
3253 Ordering::Equal => match delta {
3254 Ordering::Less => {
3255 from_regex = true;
3256 outdent_from_prev_row = true
3257 }
3258 Ordering::Greater => {
3259 indent_from_prev_row = true;
3260 from_regex = true
3261 }
3262 _ => {}
3263 },
3264
3265 Ordering::Greater => break,
3266 Ordering::Less => {}
3267 }
3268
3269 indent_changes.next();
3270 }
3271
3272 for range in &indent_ranges {
3273 if range.start.row >= row {
3274 break;
3275 }
3276 if range.start.row == prev_row && range.end > row_start {
3277 indent_from_prev_row = true;
3278 }
3279 if range.end > prev_row_start && range.end <= row_start {
3280 outdent_to_row = outdent_to_row.min(range.start.row);
3281 }
3282 }
3283
3284 if let Some(basis_row) = regex_outdent_map.get(&row) {
3285 indent_from_prev_row = false;
3286 outdent_to_row = *basis_row;
3287 from_regex = true;
3288 }
3289
3290 let within_error = error_ranges
3291 .iter()
3292 .any(|e| e.start.row < row && e.end > row_start);
3293
3294 let suggestion = if outdent_to_row == prev_row
3295 || (outdent_from_prev_row && indent_from_prev_row)
3296 {
3297 Some(IndentSuggestion {
3298 basis_row: prev_row,
3299 delta: Ordering::Equal,
3300 within_error: within_error && !from_regex,
3301 })
3302 } else if indent_from_prev_row {
3303 Some(IndentSuggestion {
3304 basis_row: prev_row,
3305 delta: Ordering::Greater,
3306 within_error: within_error && !from_regex,
3307 })
3308 } else if outdent_to_row < prev_row {
3309 Some(IndentSuggestion {
3310 basis_row: outdent_to_row,
3311 delta: Ordering::Equal,
3312 within_error: within_error && !from_regex,
3313 })
3314 } else if outdent_from_prev_row {
3315 Some(IndentSuggestion {
3316 basis_row: prev_row,
3317 delta: Ordering::Less,
3318 within_error: within_error && !from_regex,
3319 })
3320 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3321 {
3322 Some(IndentSuggestion {
3323 basis_row: prev_row,
3324 delta: Ordering::Equal,
3325 within_error: within_error && !from_regex,
3326 })
3327 } else {
3328 None
3329 };
3330
3331 prev_row = row;
3332 prev_row_start = row_start;
3333 suggestion
3334 }))
3335 }
3336
3337 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3338 while row > 0 {
3339 row -= 1;
3340 if !self.is_line_blank(row) {
3341 return Some(row);
3342 }
3343 }
3344 None
3345 }
3346
3347 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3348 let captures = self.syntax.captures(range, &self.text, |grammar| {
3349 grammar
3350 .highlights_config
3351 .as_ref()
3352 .map(|config| &config.query)
3353 });
3354 let highlight_maps = captures
3355 .grammars()
3356 .iter()
3357 .map(|grammar| grammar.highlight_map())
3358 .collect();
3359 (captures, highlight_maps)
3360 }
3361
3362 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3363 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3364 /// returned in chunks where each chunk has a single syntax highlighting style and
3365 /// diagnostic status.
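    ///
    /// A hedged sketch (marked `ignore`, so it is not compiled as a doctest); it
    /// assumes the yielded chunk type exposes the text of each run via a `text`
    /// field, as the rest of this crate does:
    ///
    /// ```ignore
    /// // Reassemble the plain text of a range, ignoring syntax highlighting.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```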
3366 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3367 let range = range.start.to_offset(self)..range.end.to_offset(self);
3368
3369 let mut syntax = None;
3370 if language_aware {
3371 syntax = Some(self.get_highlights(range.clone()));
3372 }
3373 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3374 let diagnostics = language_aware;
3375 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3376 }
3377
3378 pub fn highlighted_text_for_range<T: ToOffset>(
3379 &self,
3380 range: Range<T>,
3381 override_style: Option<HighlightStyle>,
3382 syntax_theme: &SyntaxTheme,
3383 ) -> HighlightedText {
3384 HighlightedText::from_buffer_range(
3385 range,
3386 &self.text,
3387 &self.syntax,
3388 override_style,
3389 syntax_theme,
3390 )
3391 }
3392
3393 /// Invokes the given callback for each line of text in the given range of the buffer.
3394 /// Uses callback to avoid allocating a string for each line.
3395 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3396 let mut line = String::new();
3397 let mut row = range.start.row;
3398 for chunk in self
3399 .as_rope()
3400 .chunks_in_range(range.to_offset(self))
3401 .chain(["\n"])
3402 {
3403 for (newline_ix, text) in chunk.split('\n').enumerate() {
3404 if newline_ix > 0 {
3405 callback(row, &line);
3406 row += 1;
3407 line.clear();
3408 }
3409 line.push_str(text);
3410 }
3411 }
3412 }
3413
3414 /// Iterates over every [`SyntaxLayer`] in the buffer.
3415 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3416 self.syntax_layers_for_range(0..self.len(), true)
3417 }
3418
3419 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3420 let offset = position.to_offset(self);
3421 self.syntax_layers_for_range(offset..offset, false)
3422 .filter(|l| l.node().end_byte() > offset)
3423 .last()
3424 }
3425
3426 pub fn syntax_layers_for_range<D: ToOffset>(
3427 &self,
3428 range: Range<D>,
3429 include_hidden: bool,
3430 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3431 self.syntax
3432 .layers_for_range(range, &self.text, include_hidden)
3433 }
3434
3435 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3436 &self,
3437 range: Range<D>,
3438 ) -> Option<SyntaxLayer<'_>> {
3439 let range = range.to_offset(self);
3440 self.syntax
3441 .layers_for_range(range, &self.text, false)
3442 .max_by(|a, b| {
3443 if a.depth != b.depth {
3444 a.depth.cmp(&b.depth)
3445 } else if a.offset.0 != b.offset.0 {
3446 a.offset.0.cmp(&b.offset.0)
3447 } else {
3448 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3449 }
3450 })
3451 }
3452
3453 /// Returns the main [`Language`].
3454 pub fn language(&self) -> Option<&Arc<Language>> {
3455 self.language.as_ref()
3456 }
3457
3458 /// Returns the [`Language`] at the given location.
3459 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3460 self.syntax_layer_at(position)
3461 .map(|info| info.language)
3462 .or(self.language.as_ref())
3463 }
3464
3465 /// Returns the settings for the language at the given location.
3466 pub fn settings_at<'a, D: ToOffset>(
3467 &'a self,
3468 position: D,
3469 cx: &'a App,
3470 ) -> Cow<'a, LanguageSettings> {
3471 language_settings(
3472 self.language_at(position).map(|l| l.name()),
3473 self.file.as_ref(),
3474 cx,
3475 )
3476 }
3477
3478 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3479 CharClassifier::new(self.language_scope_at(point))
3480 }
3481
3482 /// Returns the [`LanguageScope`] at the given location.
3483 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3484 let offset = position.to_offset(self);
3485 let mut scope = None;
3486 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3487
3488 // Use the layer that has the smallest node intersecting the given point.
3489 for layer in self
3490 .syntax
3491 .layers_for_range(offset..offset, &self.text, false)
3492 {
3493 let mut cursor = layer.node().walk();
3494
3495 let mut range = None;
3496 loop {
3497 let child_range = cursor.node().byte_range();
3498 if !child_range.contains(&offset) {
3499 break;
3500 }
3501
3502 range = Some(child_range);
3503 if cursor.goto_first_child_for_byte(offset).is_none() {
3504 break;
3505 }
3506 }
3507
3508 if let Some(range) = range
3509 && smallest_range_and_depth.as_ref().is_none_or(
3510 |(smallest_range, smallest_range_depth)| {
3511 if layer.depth > *smallest_range_depth {
3512 true
3513 } else if layer.depth == *smallest_range_depth {
3514 range.len() < smallest_range.len()
3515 } else {
3516 false
3517 }
3518 },
3519 )
3520 {
3521 smallest_range_and_depth = Some((range, layer.depth));
3522 scope = Some(LanguageScope {
3523 language: layer.language.clone(),
3524 override_id: layer.override_id(offset, &self.text),
3525 });
3526 }
3527 }
3528
3529 scope.or_else(|| {
3530 self.language.clone().map(|language| LanguageScope {
3531 language,
3532 override_id: None,
3533 })
3534 })
3535 }
3536
3537 /// Returns a tuple of the range and character kind of the word
3538 /// surrounding the given position.
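    ///
    /// A rough usage sketch (not compiled as a doctest; `snapshot` and `cursor_offset` are
    /// assumed to exist):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(cursor_offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(word_range).collect();
    /// }
    /// ```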
3539 pub fn surrounding_word<T: ToOffset>(
3540 &self,
3541 start: T,
3542 scope_context: Option<CharScopeContext>,
3543 ) -> (Range<usize>, Option<CharKind>) {
3544 let mut start = start.to_offset(self);
3545 let mut end = start;
3546 let mut next_chars = self.chars_at(start).take(128).peekable();
3547 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3548
3549 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3550 let word_kind = cmp::max(
3551 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3552 next_chars.peek().copied().map(|c| classifier.kind(c)),
3553 );
3554
3555 for ch in prev_chars {
3556 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3557 start -= ch.len_utf8();
3558 } else {
3559 break;
3560 }
3561 }
3562
3563 for ch in next_chars {
3564 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3565 end += ch.len_utf8();
3566 } else {
3567 break;
3568 }
3569 }
3570
3571 (start..end, word_kind)
3572 }
3573
3574 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3575 /// range. When `require_larger` is true, the node found must be larger than the query range.
3576 ///
3577 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3578 /// be moved to the root of the tree.
3579 fn goto_node_enclosing_range(
3580 cursor: &mut tree_sitter::TreeCursor,
3581 query_range: &Range<usize>,
3582 require_larger: bool,
3583 ) -> bool {
3584 let mut ascending = false;
3585 loop {
3586 let mut range = cursor.node().byte_range();
3587 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3590 if range.start > query_range.start {
3591 cursor.goto_previous_sibling();
3592 range = cursor.node().byte_range();
3593 }
3594 } else {
3595 // When the query range is non-empty and the current node ends exactly at the start,
3596 // move to the next sibling to find a node that extends beyond the start.
3597 if range.end == query_range.start {
3598 cursor.goto_next_sibling();
3599 range = cursor.node().byte_range();
3600 }
3601 }
3602
3603 let encloses = range.contains_inclusive(query_range)
3604 && (!require_larger || range.len() > query_range.len());
3605 if !encloses {
3606 ascending = true;
3607 if !cursor.goto_parent() {
3608 return false;
3609 }
3610 continue;
3611 } else if ascending {
3612 return true;
3613 }
3614
3615 // Descend into the current node.
3616 if cursor
3617 .goto_first_child_for_byte(query_range.start)
3618 .is_none()
3619 {
3620 return true;
3621 }
3622 }
3623 }
3624
3625 pub fn syntax_ancestor<'a, T: ToOffset>(
3626 &'a self,
3627 range: Range<T>,
3628 ) -> Option<tree_sitter::Node<'a>> {
3629 let range = range.start.to_offset(self)..range.end.to_offset(self);
3630 let mut result: Option<tree_sitter::Node<'a>> = None;
3631 for layer in self
3632 .syntax
3633 .layers_for_range(range.clone(), &self.text, true)
3634 {
3635 let mut cursor = layer.node().walk();
3636
3637 // Find the node that both contains the range and is larger than it.
3638 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3639 continue;
3640 }
3641
3642 let left_node = cursor.node();
3643 let mut layer_result = left_node;
3644
3645 // For an empty range, try to find another node immediately to the right of the range.
3646 if left_node.end_byte() == range.start {
3647 let mut right_node = None;
3648 while !cursor.goto_next_sibling() {
3649 if !cursor.goto_parent() {
3650 break;
3651 }
3652 }
3653
3654 while cursor.node().start_byte() == range.start {
3655 right_node = Some(cursor.node());
3656 if !cursor.goto_first_child() {
3657 break;
3658 }
3659 }
3660
3661 // If there is a candidate node on both sides of the (empty) range, then
3662 // decide between the two by favoring a named node over an anonymous token.
3663 // If both nodes are the same in that regard, favor the right one.
3664 if let Some(right_node) = right_node
3665 && (right_node.is_named() || !left_node.is_named())
3666 {
3667 layer_result = right_node;
3668 }
3669 }
3670
3671 if let Some(previous_result) = &result
3672 && previous_result.byte_range().len() < layer_result.byte_range().len()
3673 {
3674 continue;
3675 }
3676 result = Some(layer_result);
3677 }
3678
3679 result
3680 }
3681
3682 /// Find the previous sibling syntax node at the given range.
3683 ///
3684 /// This function locates the syntax node that precedes the node containing
3685 /// the given range. It searches hierarchically by:
3686 /// 1. Finding the node that contains the given range
3687 /// 2. Looking for the previous sibling at the same tree level
3688 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3689 ///
3690 /// Returns `None` if there is no previous sibling at any ancestor level.
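    ///
    /// A rough usage sketch (not compiled as a doctest; `snapshot` and `selection_range` are
    /// assumed to exist):
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(selection_range.clone()) {
    ///     // `prev.byte_range()` is the byte span of the preceding sibling node.
    ///     let prev_range = prev.byte_range();
    /// }
    /// ```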
3691 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3692 &'a self,
3693 range: Range<T>,
3694 ) -> Option<tree_sitter::Node<'a>> {
3695 let range = range.start.to_offset(self)..range.end.to_offset(self);
3696 let mut result: Option<tree_sitter::Node<'a>> = None;
3697
3698 for layer in self
3699 .syntax
3700 .layers_for_range(range.clone(), &self.text, true)
3701 {
3702 let mut cursor = layer.node().walk();
3703
3704 // Find the node that contains the range
3705 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3706 continue;
3707 }
3708
3709 // Look for the previous sibling, moving up ancestor levels if needed
3710 loop {
3711 if cursor.goto_previous_sibling() {
3712 let layer_result = cursor.node();
3713
3714 if let Some(previous_result) = &result {
3715 if previous_result.byte_range().end < layer_result.byte_range().end {
3716 continue;
3717 }
3718 }
3719 result = Some(layer_result);
3720 break;
3721 }
3722
3723 // No sibling found at this level, try moving up to parent
3724 if !cursor.goto_parent() {
3725 break;
3726 }
3727 }
3728 }
3729
3730 result
3731 }
3732
3733 /// Find the next sibling syntax node at the given range.
3734 ///
3735 /// This function locates the syntax node that follows the node containing
3736 /// the given range. It searches hierarchically by:
3737 /// 1. Finding the node that contains the given range
3738 /// 2. Looking for the next sibling at the same tree level
3739 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3740 ///
3741 /// Returns `None` if there is no next sibling at any ancestor level.
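    ///
    /// A rough usage sketch, mirroring `syntax_prev_sibling` (not compiled as a doctest):
    ///
    /// ```ignore
    /// if let Some(next) = snapshot.syntax_next_sibling(selection_range.clone()) {
    ///     let next_range = next.byte_range();
    /// }
    /// ```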
3742 pub fn syntax_next_sibling<'a, T: ToOffset>(
3743 &'a self,
3744 range: Range<T>,
3745 ) -> Option<tree_sitter::Node<'a>> {
3746 let range = range.start.to_offset(self)..range.end.to_offset(self);
3747 let mut result: Option<tree_sitter::Node<'a>> = None;
3748
3749 for layer in self
3750 .syntax
3751 .layers_for_range(range.clone(), &self.text, true)
3752 {
3753 let mut cursor = layer.node().walk();
3754
3755 // Find the node that contains the range
3756 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3757 continue;
3758 }
3759
3760 // Look for the next sibling, moving up ancestor levels if needed
3761 loop {
3762 if cursor.goto_next_sibling() {
3763 let layer_result = cursor.node();
3764
3765 if let Some(previous_result) = &result {
3766 if previous_result.byte_range().start > layer_result.byte_range().start {
3767 continue;
3768 }
3769 }
3770 result = Some(layer_result);
3771 break;
3772 }
3773
3774 // No sibling found at this level, try moving up to parent
3775 if !cursor.goto_parent() {
3776 break;
3777 }
3778 }
3779 }
3780
3781 result
3782 }
3783
    /// Returns the root syntax node within the given row.
3785 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3786 let start_offset = position.to_offset(self);
3787
3788 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3789
3790 let layer = self
3791 .syntax
3792 .layers_for_range(start_offset..start_offset, &self.text, true)
3793 .next()?;
3794
3795 let mut cursor = layer.node().walk();
3796
        // Descend to the first leaf that touches the start offset.
3798 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3799 if cursor.node().end_byte() == start_offset {
3800 cursor.goto_next_sibling();
3801 }
3802 }
3803
3804 // Ascend to the root node within the same row.
3805 while cursor.goto_parent() {
3806 if cursor.node().start_position().row != row {
3807 break;
3808 }
3809 }
3810
3811 Some(cursor.node())
3812 }
3813
3814 /// Returns the outline for the buffer.
3815 ///
3816 /// This method allows passing an optional [`SyntaxTheme`] to
3817 /// syntax-highlight the returned symbols.
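    ///
    /// A rough usage sketch (not compiled as a doctest; public access to the outline's items
    /// is assumed here):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```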
3818 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3819 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3820 }
3821
3822 /// Returns all the symbols that contain the given position.
3823 ///
3824 /// This method allows passing an optional [`SyntaxTheme`] to
3825 /// syntax-highlight the returned symbols.
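    ///
    /// A rough usage sketch (not compiled as a doctest; `cursor_offset` and public access to
    /// `OutlineItem` fields are assumed):
    ///
    /// ```ignore
    /// let symbols = snapshot.symbols_containing(cursor_offset, None);
    /// let breadcrumb = symbols
    ///     .iter()
    ///     .map(|item| item.text.as_str())
    ///     .collect::<Vec<_>>()
    ///     .join(" > ");
    /// ```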
3826 pub fn symbols_containing<T: ToOffset>(
3827 &self,
3828 position: T,
3829 theme: Option<&SyntaxTheme>,
3830 ) -> Vec<OutlineItem<Anchor>> {
3831 let position = position.to_offset(self);
3832 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3833 let end = self.clip_offset(position + 1, Bias::Right);
3834 let mut items = self.outline_items_containing(start..end, false, theme);
3835 let mut prev_depth = None;
3836 items.retain(|item| {
3837 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3838 prev_depth = Some(item.depth);
3839 result
3840 });
3841 items
3842 }
3843
3844 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3845 let range = range.to_offset(self);
3846 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3847 grammar.outline_config.as_ref().map(|c| &c.query)
3848 });
3849 let configs = matches
3850 .grammars()
3851 .iter()
3852 .map(|g| g.outline_config.as_ref().unwrap())
3853 .collect::<Vec<_>>();
3854
3855 while let Some(mat) = matches.peek() {
3856 let config = &configs[mat.grammar_index];
3857 let containing_item_node = maybe!({
3858 let item_node = mat.captures.iter().find_map(|cap| {
3859 if cap.index == config.item_capture_ix {
3860 Some(cap.node)
3861 } else {
3862 None
3863 }
3864 })?;
3865
3866 let item_byte_range = item_node.byte_range();
3867 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3868 None
3869 } else {
3870 Some(item_node)
3871 }
3872 });
3873
3874 if let Some(item_node) = containing_item_node {
3875 return Some(
3876 Point::from_ts_point(item_node.start_position())
3877 ..Point::from_ts_point(item_node.end_position()),
3878 );
3879 }
3880
3881 matches.advance();
3882 }
3883 None
3884 }
3885
3886 pub fn outline_items_containing<T: ToOffset>(
3887 &self,
3888 range: Range<T>,
3889 include_extra_context: bool,
3890 theme: Option<&SyntaxTheme>,
3891 ) -> Vec<OutlineItem<Anchor>> {
3892 self.outline_items_containing_internal(
3893 range,
3894 include_extra_context,
3895 theme,
3896 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3897 )
3898 }
3899
3900 pub fn outline_items_as_points_containing<T: ToOffset>(
3901 &self,
3902 range: Range<T>,
3903 include_extra_context: bool,
3904 theme: Option<&SyntaxTheme>,
3905 ) -> Vec<OutlineItem<Point>> {
3906 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3907 range
3908 })
3909 }
3910
3911 fn outline_items_containing_internal<T: ToOffset, U>(
3912 &self,
3913 range: Range<T>,
3914 include_extra_context: bool,
3915 theme: Option<&SyntaxTheme>,
3916 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3917 ) -> Vec<OutlineItem<U>> {
3918 let range = range.to_offset(self);
3919 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3920 grammar.outline_config.as_ref().map(|c| &c.query)
3921 });
3922
3923 let mut items = Vec::new();
3924 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3925 while let Some(mat) = matches.peek() {
3926 let config = matches.grammars()[mat.grammar_index]
3927 .outline_config
3928 .as_ref()
3929 .unwrap();
3930 if let Some(item) =
3931 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3932 {
3933 items.push(item);
3934 } else if let Some(capture) = mat
3935 .captures
3936 .iter()
3937 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3938 {
3939 let capture_range = capture.node.start_position()..capture.node.end_position();
3940 let mut capture_row_range =
3941 capture_range.start.row as u32..capture_range.end.row as u32;
3942 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3943 {
3944 capture_row_range.end -= 1;
3945 }
3946 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3947 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3948 last_row_range.end = capture_row_range.end;
3949 } else {
3950 annotation_row_ranges.push(capture_row_range);
3951 }
3952 } else {
3953 annotation_row_ranges.push(capture_row_range);
3954 }
3955 }
3956 matches.advance();
3957 }
3958
3959 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3960
        // Assign depths based on containment relationships and convert the ranges using `range_callback`.
3962 let mut item_ends_stack = Vec::<Point>::new();
3963 let mut anchor_items = Vec::new();
3964 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3965 for item in items {
3966 while let Some(last_end) = item_ends_stack.last().copied() {
3967 if last_end < item.range.end {
3968 item_ends_stack.pop();
3969 } else {
3970 break;
3971 }
3972 }
3973
3974 let mut annotation_row_range = None;
3975 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3976 let row_preceding_item = item.range.start.row.saturating_sub(1);
3977 if next_annotation_row_range.end < row_preceding_item {
3978 annotation_row_ranges.next();
3979 } else {
3980 if next_annotation_row_range.end == row_preceding_item {
3981 annotation_row_range = Some(next_annotation_row_range.clone());
3982 annotation_row_ranges.next();
3983 }
3984 break;
3985 }
3986 }
3987
3988 anchor_items.push(OutlineItem {
3989 depth: item_ends_stack.len(),
3990 range: range_callback(self, item.range.clone()),
3991 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3992 text: item.text,
3993 highlight_ranges: item.highlight_ranges,
3994 name_ranges: item.name_ranges,
3995 body_range: item.body_range.map(|r| range_callback(self, r)),
3996 annotation_range: annotation_row_range.map(|annotation_range| {
3997 let point_range = Point::new(annotation_range.start, 0)
3998 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3999 range_callback(self, point_range)
4000 }),
4001 });
4002 item_ends_stack.push(item.range.end);
4003 }
4004
4005 anchor_items
4006 }
4007
4008 fn next_outline_item(
4009 &self,
4010 config: &OutlineConfig,
4011 mat: &SyntaxMapMatch,
4012 range: &Range<usize>,
4013 include_extra_context: bool,
4014 theme: Option<&SyntaxTheme>,
4015 ) -> Option<OutlineItem<Point>> {
4016 let item_node = mat.captures.iter().find_map(|cap| {
4017 if cap.index == config.item_capture_ix {
4018 Some(cap.node)
4019 } else {
4020 None
4021 }
4022 })?;
4023
4024 let item_byte_range = item_node.byte_range();
4025 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4026 return None;
4027 }
4028 let item_point_range = Point::from_ts_point(item_node.start_position())
4029 ..Point::from_ts_point(item_node.end_position());
4030
4031 let mut open_point = None;
4032 let mut close_point = None;
4033
4034 let mut buffer_ranges = Vec::new();
4035 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4036 let mut range = node.start_byte()..node.end_byte();
4037 let start = node.start_position();
4038 if node.end_position().row > start.row {
4039 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4040 }
4041
4042 if !range.is_empty() {
4043 buffer_ranges.push((range, node_is_name));
4044 }
4045 };
4046
4047 for capture in mat.captures {
4048 if capture.index == config.name_capture_ix {
4049 add_to_buffer_ranges(capture.node, true);
4050 } else if Some(capture.index) == config.context_capture_ix
4051 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4052 {
4053 add_to_buffer_ranges(capture.node, false);
4054 } else {
4055 if Some(capture.index) == config.open_capture_ix {
4056 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4057 } else if Some(capture.index) == config.close_capture_ix {
4058 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4059 }
4060 }
4061 }
4062
4063 if buffer_ranges.is_empty() {
4064 return None;
4065 }
4066 let source_range_for_text =
4067 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4068
4069 let mut text = String::new();
4070 let mut highlight_ranges = Vec::new();
4071 let mut name_ranges = Vec::new();
4072 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4073 let mut last_buffer_range_end = 0;
4074 for (buffer_range, is_name) in buffer_ranges {
4075 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4076 if space_added {
4077 text.push(' ');
4078 }
4079 let before_append_len = text.len();
4080 let mut offset = buffer_range.start;
4081 chunks.seek(buffer_range.clone());
4082 for mut chunk in chunks.by_ref() {
4083 if chunk.text.len() > buffer_range.end - offset {
4084 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4085 offset = buffer_range.end;
4086 } else {
4087 offset += chunk.text.len();
4088 }
4089 let style = chunk
4090 .syntax_highlight_id
4091 .zip(theme)
4092 .and_then(|(highlight, theme)| highlight.style(theme));
4093 if let Some(style) = style {
4094 let start = text.len();
4095 let end = start + chunk.text.len();
4096 highlight_ranges.push((start..end, style));
4097 }
4098 text.push_str(chunk.text);
4099 if offset >= buffer_range.end {
4100 break;
4101 }
4102 }
4103 if is_name {
4104 let after_append_len = text.len();
4105 let start = if space_added && !name_ranges.is_empty() {
4106 before_append_len - 1
4107 } else {
4108 before_append_len
4109 };
4110 name_ranges.push(start..after_append_len);
4111 }
4112 last_buffer_range_end = buffer_range.end;
4113 }
4114
4115 Some(OutlineItem {
4116 depth: 0, // We'll calculate the depth later
4117 range: item_point_range,
4118 source_range_for_text: source_range_for_text.to_point(self),
4119 text,
4120 highlight_ranges,
4121 name_ranges,
4122 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4123 annotation_range: None,
4124 })
4125 }
4126
4127 pub fn function_body_fold_ranges<T: ToOffset>(
4128 &self,
4129 within: Range<T>,
4130 ) -> impl Iterator<Item = Range<usize>> + '_ {
4131 self.text_object_ranges(within, TreeSitterOptions::default())
4132 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4133 }
4134
    /// For each grammar in the buffer's syntax layers, runs the [`tree_sitter::Query`]
    /// returned by the provided callback against the given range.
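    ///
    /// A rough usage sketch (not compiled as a doctest), mirroring how the outline query is
    /// consumed elsewhere in this file:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```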
4137 pub fn matches(
4138 &self,
4139 range: Range<usize>,
4140 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4141 ) -> SyntaxMapMatches<'_> {
4142 self.syntax.matches(range, self, query)
4143 }
4144
4145 pub fn all_bracket_ranges(
4146 &self,
4147 range: Range<usize>,
4148 ) -> impl Iterator<Item = BracketMatch> + '_ {
4149 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4150 grammar.brackets_config.as_ref().map(|c| &c.query)
4151 });
4152 let configs = matches
4153 .grammars()
4154 .iter()
4155 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4156 .collect::<Vec<_>>();
4157
4158 iter::from_fn(move || {
4159 while let Some(mat) = matches.peek() {
4160 let mut open = None;
4161 let mut close = None;
4162 let config = &configs[mat.grammar_index];
4163 let pattern = &config.patterns[mat.pattern_index];
4164 for capture in mat.captures {
4165 if capture.index == config.open_capture_ix {
4166 open = Some(capture.node.byte_range());
4167 } else if capture.index == config.close_capture_ix {
4168 close = Some(capture.node.byte_range());
4169 }
4170 }
4171
4172 matches.advance();
4173
4174 let Some((open_range, close_range)) = open.zip(close) else {
4175 continue;
4176 };
4177
4178 let bracket_range = open_range.start..=close_range.end;
4179 if !bracket_range.overlaps(&range) {
4180 continue;
4181 }
4182
4183 return Some(BracketMatch {
4184 open_range,
4185 close_range,
4186 newline_only: pattern.newline_only,
4187 });
4188 }
4189 None
4190 })
4191 }
4192
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4194 pub fn bracket_ranges<T: ToOffset>(
4195 &self,
4196 range: Range<T>,
4197 ) -> impl Iterator<Item = BracketMatch> + '_ {
4198 // Find bracket pairs that *inclusively* contain the given range.
4199 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4200 self.all_bracket_ranges(range)
4201 .filter(|pair| !pair.newline_only)
4202 }
4203
4204 pub fn debug_variables_query<T: ToOffset>(
4205 &self,
4206 range: Range<T>,
4207 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4208 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4209
4210 let mut matches = self.syntax.matches_with_options(
4211 range.clone(),
4212 &self.text,
4213 TreeSitterOptions::default(),
4214 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4215 );
4216
4217 let configs = matches
4218 .grammars()
4219 .iter()
4220 .map(|grammar| grammar.debug_variables_config.as_ref())
4221 .collect::<Vec<_>>();
4222
4223 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4224
4225 iter::from_fn(move || {
4226 loop {
4227 while let Some(capture) = captures.pop() {
4228 if capture.0.overlaps(&range) {
4229 return Some(capture);
4230 }
4231 }
4232
4233 let mat = matches.peek()?;
4234
4235 let Some(config) = configs[mat.grammar_index].as_ref() else {
4236 matches.advance();
4237 continue;
4238 };
4239
4240 for capture in mat.captures {
4241 let Some(ix) = config
4242 .objects_by_capture_ix
4243 .binary_search_by_key(&capture.index, |e| e.0)
4244 .ok()
4245 else {
4246 continue;
4247 };
4248 let text_object = config.objects_by_capture_ix[ix].1;
4249 let byte_range = capture.node.byte_range();
4250
4251 let mut found = false;
4252 for (range, existing) in captures.iter_mut() {
4253 if existing == &text_object {
4254 range.start = range.start.min(byte_range.start);
4255 range.end = range.end.max(byte_range.end);
4256 found = true;
4257 break;
4258 }
4259 }
4260
4261 if !found {
4262 captures.push((byte_range, text_object));
4263 }
4264 }
4265
4266 matches.advance();
4267 }
4268 })
4269 }
4270
4271 pub fn text_object_ranges<T: ToOffset>(
4272 &self,
4273 range: Range<T>,
4274 options: TreeSitterOptions,
4275 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4276 let range =
4277 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4278
4279 let mut matches =
4280 self.syntax
4281 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4282 grammar.text_object_config.as_ref().map(|c| &c.query)
4283 });
4284
4285 let configs = matches
4286 .grammars()
4287 .iter()
4288 .map(|grammar| grammar.text_object_config.as_ref())
4289 .collect::<Vec<_>>();
4290
4291 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4292
4293 iter::from_fn(move || {
4294 loop {
4295 while let Some(capture) = captures.pop() {
4296 if capture.0.overlaps(&range) {
4297 return Some(capture);
4298 }
4299 }
4300
4301 let mat = matches.peek()?;
4302
4303 let Some(config) = configs[mat.grammar_index].as_ref() else {
4304 matches.advance();
4305 continue;
4306 };
4307
4308 for capture in mat.captures {
4309 let Some(ix) = config
4310 .text_objects_by_capture_ix
4311 .binary_search_by_key(&capture.index, |e| e.0)
4312 .ok()
4313 else {
4314 continue;
4315 };
4316 let text_object = config.text_objects_by_capture_ix[ix].1;
4317 let byte_range = capture.node.byte_range();
4318
4319 let mut found = false;
4320 for (range, existing) in captures.iter_mut() {
4321 if existing == &text_object {
4322 range.start = range.start.min(byte_range.start);
4323 range.end = range.end.max(byte_range.end);
4324 found = true;
4325 break;
4326 }
4327 }
4328
4329 if !found {
4330 captures.push((byte_range, text_object));
4331 }
4332 }
4333
4334 matches.advance();
4335 }
4336 })
4337 }
4338
    /// Returns enclosing bracket ranges containing the given range.
4340 pub fn enclosing_bracket_ranges<T: ToOffset>(
4341 &self,
4342 range: Range<T>,
4343 ) -> impl Iterator<Item = BracketMatch> + '_ {
4344 let range = range.start.to_offset(self)..range.end.to_offset(self);
4345
4346 self.bracket_ranges(range.clone()).filter(move |pair| {
4347 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4348 })
4349 }
4350
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if
    /// no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
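    ///
    /// A rough usage sketch (not compiled as a doctest; `selection_range` is assumed):
    ///
    /// ```ignore
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection_range, None)
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost bracket pair.
    /// }
    /// ```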
4354 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4355 &self,
4356 range: Range<T>,
4357 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4358 ) -> Option<(Range<usize>, Range<usize>)> {
4359 let range = range.start.to_offset(self)..range.end.to_offset(self);
4360
4361 // Get the ranges of the innermost pair of brackets.
4362 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4363
4364 for pair in self.enclosing_bracket_ranges(range) {
4365 if let Some(range_filter) = range_filter
4366 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4367 {
4368 continue;
4369 }
4370
4371 let len = pair.close_range.end - pair.open_range.start;
4372
4373 if let Some((existing_open, existing_close)) = &result {
4374 let existing_len = existing_close.end - existing_open.start;
4375 if len > existing_len {
4376 continue;
4377 }
4378 }
4379
4380 result = Some((pair.open_range, pair.close_range));
4381 }
4382
4383 result
4384 }
4385
4386 /// Returns anchor ranges for any matches of the redaction query.
4387 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4388 /// will be run on the relevant section of the buffer.
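    ///
    /// A rough usage sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```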
4389 pub fn redacted_ranges<T: ToOffset>(
4390 &self,
4391 range: Range<T>,
4392 ) -> impl Iterator<Item = Range<usize>> + '_ {
4393 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4394 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4395 grammar
4396 .redactions_config
4397 .as_ref()
4398 .map(|config| &config.query)
4399 });
4400
4401 let configs = syntax_matches
4402 .grammars()
4403 .iter()
4404 .map(|grammar| grammar.redactions_config.as_ref())
4405 .collect::<Vec<_>>();
4406
4407 iter::from_fn(move || {
4408 let redacted_range = syntax_matches
4409 .peek()
4410 .and_then(|mat| {
4411 configs[mat.grammar_index].and_then(|config| {
4412 mat.captures
4413 .iter()
4414 .find(|capture| capture.index == config.redaction_capture_ix)
4415 })
4416 })
4417 .map(|mat| mat.node.byte_range());
4418 syntax_matches.advance();
4419 redacted_range
4420 })
4421 }
4422
4423 pub fn injections_intersecting_range<T: ToOffset>(
4424 &self,
4425 range: Range<T>,
4426 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4427 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4428
4429 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4430 grammar
4431 .injection_config
4432 .as_ref()
4433 .map(|config| &config.query)
4434 });
4435
4436 let configs = syntax_matches
4437 .grammars()
4438 .iter()
4439 .map(|grammar| grammar.injection_config.as_ref())
4440 .collect::<Vec<_>>();
4441
4442 iter::from_fn(move || {
4443 let ranges = syntax_matches.peek().and_then(|mat| {
4444 let config = &configs[mat.grammar_index]?;
4445 let content_capture_range = mat.captures.iter().find_map(|capture| {
4446 if capture.index == config.content_capture_ix {
4447 Some(capture.node.byte_range())
4448 } else {
4449 None
4450 }
4451 })?;
4452 let language = self.language_at(content_capture_range.start)?;
4453 Some((content_capture_range, language))
4454 });
4455 syntax_matches.advance();
4456 ranges
4457 })
4458 }
4459
4460 pub fn runnable_ranges(
4461 &self,
4462 offset_range: Range<usize>,
4463 ) -> impl Iterator<Item = RunnableRange> + '_ {
4464 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4465 grammar.runnable_config.as_ref().map(|config| &config.query)
4466 });
4467
4468 let test_configs = syntax_matches
4469 .grammars()
4470 .iter()
4471 .map(|grammar| grammar.runnable_config.as_ref())
4472 .collect::<Vec<_>>();
4473
4474 iter::from_fn(move || {
4475 loop {
4476 let mat = syntax_matches.peek()?;
4477
4478 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4479 let mut run_range = None;
4480 let full_range = mat.captures.iter().fold(
4481 Range {
4482 start: usize::MAX,
4483 end: 0,
4484 },
4485 |mut acc, next| {
4486 let byte_range = next.node.byte_range();
4487 if acc.start > byte_range.start {
4488 acc.start = byte_range.start;
4489 }
4490 if acc.end < byte_range.end {
4491 acc.end = byte_range.end;
4492 }
4493 acc
4494 },
4495 );
4496 if full_range.start > full_range.end {
4497 // We did not find a full spanning range of this match.
4498 return None;
4499 }
4500 let extra_captures: SmallVec<[_; 1]> =
4501 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4502 test_configs
4503 .extra_captures
4504 .get(capture.index as usize)
4505 .cloned()
4506 .and_then(|tag_name| match tag_name {
4507 RunnableCapture::Named(name) => {
4508 Some((capture.node.byte_range(), name))
4509 }
4510 RunnableCapture::Run => {
4511 let _ = run_range.insert(capture.node.byte_range());
4512 None
4513 }
4514 })
4515 }));
4516 let run_range = run_range?;
4517 let tags = test_configs
4518 .query
4519 .property_settings(mat.pattern_index)
4520 .iter()
4521 .filter_map(|property| {
4522 if *property.key == *"tag" {
4523 property
4524 .value
4525 .as_ref()
4526 .map(|value| RunnableTag(value.to_string().into()))
4527 } else {
4528 None
4529 }
4530 })
4531 .collect();
4532 let extra_captures = extra_captures
4533 .into_iter()
4534 .map(|(range, name)| {
4535 (
4536 name.to_string(),
4537 self.text_for_range(range).collect::<String>(),
4538 )
4539 })
4540 .collect();
4541 // All tags should have the same range.
4542 Some(RunnableRange {
4543 run_range,
4544 full_range,
4545 runnable: Runnable {
4546 tags,
4547 language: mat.language,
4548 buffer: self.remote_id(),
4549 },
4550 extra_captures,
4551 buffer_id: self.remote_id(),
4552 })
4553 });
4554
4555 syntax_matches.advance();
4556 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But when a match
                    // did not contain a run marker, we don't want to end this iterator early, so we
                    // loop around to the next match instead of returning `None`.
4559 return test_range;
4560 }
4561 }
4562 })
4563 }
4564
4565 /// Returns selections for remote peers intersecting the given range.
4566 #[allow(clippy::type_complexity)]
4567 pub fn selections_in_range(
4568 &self,
4569 range: Range<Anchor>,
4570 include_local: bool,
4571 ) -> impl Iterator<
4572 Item = (
4573 ReplicaId,
4574 bool,
4575 CursorShape,
4576 impl Iterator<Item = &Selection<Anchor>> + '_,
4577 ),
4578 > + '_ {
4579 self.remote_selections
4580 .iter()
4581 .filter(move |(replica_id, set)| {
4582 (include_local || **replica_id != self.text.replica_id())
4583 && !set.selections.is_empty()
4584 })
4585 .map(move |(replica_id, set)| {
4586 let start_ix = match set.selections.binary_search_by(|probe| {
4587 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4588 }) {
4589 Ok(ix) | Err(ix) => ix,
4590 };
4591 let end_ix = match set.selections.binary_search_by(|probe| {
4592 probe.start.cmp(&range.end, self).then(Ordering::Less)
4593 }) {
4594 Ok(ix) | Err(ix) => ix,
4595 };
4596
4597 (
4598 *replica_id,
4599 set.line_mode,
4600 set.cursor_shape,
4601 set.selections[start_ix..end_ix].iter(),
4602 )
4603 })
4604 }
4605
    /// Returns whether the buffer contains any diagnostics.
4607 pub fn has_diagnostics(&self) -> bool {
4608 !self.diagnostics.is_empty()
4609 }
4610
4611 /// Returns all the diagnostics intersecting the given range.
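    ///
    /// A rough usage sketch (not compiled as a doctest), resolving the entries to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
    ///         // `entry.range` is a `Range<usize>` into the buffer.
    ///     }
    /// }
    /// ```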
4612 pub fn diagnostics_in_range<'a, T, O>(
4613 &'a self,
4614 search_range: Range<T>,
4615 reversed: bool,
4616 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4617 where
4618 T: 'a + Clone + ToOffset,
4619 O: 'a + FromAnchor,
4620 {
4621 let mut iterators: Vec<_> = self
4622 .diagnostics
4623 .iter()
4624 .map(|(_, collection)| {
4625 collection
4626 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4627 .peekable()
4628 })
4629 .collect();
4630
4631 std::iter::from_fn(move || {
4632 let (next_ix, _) = iterators
4633 .iter_mut()
4634 .enumerate()
4635 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4636 .min_by(|(_, a), (_, b)| {
4637 let cmp = a
4638 .range
4639 .start
4640 .cmp(&b.range.start, self)
4641 // when range is equal, sort by diagnostic severity
4642 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4643 // and stabilize order with group_id
4644 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4645 if reversed { cmp.reverse() } else { cmp }
4646 })?;
4647 iterators[next_ix]
4648 .next()
4649 .map(
4650 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4651 diagnostic,
4652 range: FromAnchor::from_anchor(&range.start, self)
4653 ..FromAnchor::from_anchor(&range.end, self),
4654 },
4655 )
4656 })
4657 }
4658
4659 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4660 /// should be used instead.
4661 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4662 &self.diagnostics
4663 }
4664
4665 /// Returns all the diagnostic groups associated with the given
4666 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4668 pub fn diagnostic_groups(
4669 &self,
4670 language_server_id: Option<LanguageServerId>,
4671 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4672 let mut groups = Vec::new();
4673
4674 if let Some(language_server_id) = language_server_id {
4675 if let Ok(ix) = self
4676 .diagnostics
4677 .binary_search_by_key(&language_server_id, |e| e.0)
4678 {
4679 self.diagnostics[ix]
4680 .1
4681 .groups(language_server_id, &mut groups, self);
4682 }
4683 } else {
4684 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4685 diagnostics.groups(*language_server_id, &mut groups, self);
4686 }
4687 }
4688
4689 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4690 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4691 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4692 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4693 });
4694
4695 groups
4696 }
4697
4698 /// Returns an iterator over the diagnostics for the given group.
4699 pub fn diagnostic_group<O>(
4700 &self,
4701 group_id: usize,
4702 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4703 where
4704 O: FromAnchor + 'static,
4705 {
4706 self.diagnostics
4707 .iter()
4708 .flat_map(move |(_, set)| set.group(group_id, self))
4709 }
4710
4711 /// An integer version number that accounts for all updates besides
4712 /// the buffer's text itself (which is versioned via a version vector).
4713 pub fn non_text_state_update_count(&self) -> usize {
4714 self.non_text_state_update_count
4715 }
4716
4717 /// An integer version that changes when the buffer's syntax changes.
4718 pub fn syntax_update_count(&self) -> usize {
4719 self.syntax.update_count()
4720 }
4721
    /// Returns a snapshot of the underlying file.
4723 pub fn file(&self) -> Option<&Arc<dyn File>> {
4724 self.file.as_ref()
4725 }
4726
4727 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4728 if let Some(file) = self.file() {
4729 if file.path().file_name().is_none() || include_root {
4730 Some(file.full_path(cx).to_string_lossy().into_owned())
4731 } else {
4732 Some(file.path().display(file.path_style(cx)).to_string())
4733 }
4734 } else {
4735 None
4736 }
4737 }
4738
4739 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4740 let query_str = query.fuzzy_contents;
4741 if query_str.is_some_and(|query| query.is_empty()) {
4742 return BTreeMap::default();
4743 }
4744
4745 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4746 language,
4747 override_id: None,
4748 }));
4749
4750 let mut query_ix = 0;
4751 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4752 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4753
4754 let mut words = BTreeMap::default();
4755 let mut current_word_start_ix = None;
4756 let mut chunk_ix = query.range.start;
4757 for chunk in self.chunks(query.range, false) {
4758 for (i, c) in chunk.text.char_indices() {
4759 let ix = chunk_ix + i;
4760 if classifier.is_word(c) {
4761 if current_word_start_ix.is_none() {
4762 current_word_start_ix = Some(ix);
4763 }
4764
4765 if let Some(query_chars) = &query_chars
4766 && query_ix < query_len
4767 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4768 {
4769 query_ix += 1;
4770 }
4771 continue;
4772 } else if let Some(word_start) = current_word_start_ix.take()
4773 && query_ix == query_len
4774 {
4775 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4776 let mut word_text = self.text_for_range(word_start..ix).peekable();
4777 let first_char = word_text
4778 .peek()
4779 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and
                    // "words" that start with a digit.
4781 if !query.skip_digits
4782 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4783 {
4784 words.insert(word_text.collect(), word_range);
4785 }
4786 }
4787 query_ix = 0;
4788 }
4789 chunk_ix += chunk.text.len();
4790 }
4791
4792 words
4793 }
4794}
4795
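/// A query describing which words `words_in_range` should collect.
///
/// A rough usage sketch (not compiled as a doctest; `snapshot` is assumed to be a buffer
/// snapshot):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("buf"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```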
4796pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy string.
    pub fuzzy_contents: Option<&'a str>,
    /// Skip words that start with a digit.
    pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4802 pub range: Range<usize>,
4803}
4804
4805fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4806 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4807}
4808
4809fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4810 let mut result = IndentSize::spaces(0);
4811 for c in text {
4812 let kind = match c {
4813 ' ' => IndentKind::Space,
4814 '\t' => IndentKind::Tab,
4815 _ => break,
4816 };
4817 if result.len == 0 {
4818 result.kind = kind;
4819 }
4820 result.len += 1;
4821 }
4822 result
4823}
4824
4825impl Clone for BufferSnapshot {
4826 fn clone(&self) -> Self {
4827 Self {
4828 text: self.text.clone(),
4829 syntax: self.syntax.clone(),
4830 file: self.file.clone(),
4831 remote_selections: self.remote_selections.clone(),
4832 diagnostics: self.diagnostics.clone(),
4833 language: self.language.clone(),
4834 non_text_state_update_count: self.non_text_state_update_count,
4835 }
4836 }
4837}
4838
4839impl Deref for BufferSnapshot {
4840 type Target = text::BufferSnapshot;
4841
4842 fn deref(&self) -> &Self::Target {
4843 &self.text
4844 }
4845}
4846
4847unsafe impl Send for BufferChunks<'_> {}
4848
4849impl<'a> BufferChunks<'a> {
4850 pub(crate) fn new(
4851 text: &'a Rope,
4852 range: Range<usize>,
4853 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4854 diagnostics: bool,
4855 buffer_snapshot: Option<&'a BufferSnapshot>,
4856 ) -> Self {
4857 let mut highlights = None;
4858 if let Some((captures, highlight_maps)) = syntax {
4859 highlights = Some(BufferChunkHighlights {
4860 captures,
4861 next_capture: None,
4862 stack: Default::default(),
4863 highlight_maps,
4864 })
4865 }
4866
4867 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4868 let chunks = text.chunks_in_range(range.clone());
4869
4870 let mut this = BufferChunks {
4871 range,
4872 buffer_snapshot,
4873 chunks,
4874 diagnostic_endpoints,
4875 error_depth: 0,
4876 warning_depth: 0,
4877 information_depth: 0,
4878 hint_depth: 0,
4879 unnecessary_depth: 0,
4880 underline: true,
4881 highlights,
4882 };
4883 this.initialize_diagnostic_endpoints();
4884 this
4885 }
4886
    /// Seeks to the given byte range in the buffer.
4888 pub fn seek(&mut self, range: Range<usize>) {
4889 let old_range = std::mem::replace(&mut self.range, range.clone());
4890 self.chunks.set_range(self.range.clone());
4891 if let Some(highlights) = self.highlights.as_mut() {
4892 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4893 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4894 highlights
4895 .stack
4896 .retain(|(end_offset, _)| *end_offset > range.start);
4897 if let Some(capture) = &highlights.next_capture
4898 && range.start >= capture.node.start_byte()
4899 {
4900 let next_capture_end = capture.node.end_byte();
4901 if range.start < next_capture_end {
4902 highlights.stack.push((
4903 next_capture_end,
4904 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4905 ));
4906 }
4907 highlights.next_capture.take();
4908 }
4909 } else if let Some(snapshot) = self.buffer_snapshot {
4910 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4911 *highlights = BufferChunkHighlights {
4912 captures,
4913 next_capture: None,
4914 stack: Default::default(),
4915 highlight_maps,
4916 };
4917 } else {
4918 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4919 // Seeking such BufferChunks is not supported.
4920 debug_assert!(
4921 false,
4922 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4923 );
4924 }
4925
4926 highlights.captures.set_byte_range(self.range.clone());
4927 self.initialize_diagnostic_endpoints();
4928 }
4929 }
4930
4931 fn initialize_diagnostic_endpoints(&mut self) {
4932 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4933 && let Some(buffer) = self.buffer_snapshot
4934 {
4935 let mut diagnostic_endpoints = Vec::new();
4936 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4937 diagnostic_endpoints.push(DiagnosticEndpoint {
4938 offset: entry.range.start,
4939 is_start: true,
4940 severity: entry.diagnostic.severity,
4941 is_unnecessary: entry.diagnostic.is_unnecessary,
4942 underline: entry.diagnostic.underline,
4943 });
4944 diagnostic_endpoints.push(DiagnosticEndpoint {
4945 offset: entry.range.end,
4946 is_start: false,
4947 severity: entry.diagnostic.severity,
4948 is_unnecessary: entry.diagnostic.is_unnecessary,
4949 underline: entry.diagnostic.underline,
4950 });
4951 }
4952 diagnostic_endpoints
4953 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4954 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4955 self.hint_depth = 0;
4956 self.error_depth = 0;
4957 self.warning_depth = 0;
4958 self.information_depth = 0;
4959 }
4960 }
4961
4962 /// The current byte offset in the buffer.
4963 pub fn offset(&self) -> usize {
4964 self.range.start
4965 }
4966
4967 pub fn range(&self) -> Range<usize> {
4968 self.range.clone()
4969 }
4970
4971 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4972 let depth = match endpoint.severity {
4973 DiagnosticSeverity::ERROR => &mut self.error_depth,
4974 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4975 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4976 DiagnosticSeverity::HINT => &mut self.hint_depth,
4977 _ => return,
4978 };
4979 if endpoint.is_start {
4980 *depth += 1;
4981 } else {
4982 *depth -= 1;
4983 }
4984
4985 if endpoint.is_unnecessary {
4986 if endpoint.is_start {
4987 self.unnecessary_depth += 1;
4988 } else {
4989 self.unnecessary_depth -= 1;
4990 }
4991 }
4992 }
4993
4994 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4995 if self.error_depth > 0 {
4996 Some(DiagnosticSeverity::ERROR)
4997 } else if self.warning_depth > 0 {
4998 Some(DiagnosticSeverity::WARNING)
4999 } else if self.information_depth > 0 {
5000 Some(DiagnosticSeverity::INFORMATION)
5001 } else if self.hint_depth > 0 {
5002 Some(DiagnosticSeverity::HINT)
5003 } else {
5004 None
5005 }
5006 }
5007
5008 fn current_code_is_unnecessary(&self) -> bool {
5009 self.unnecessary_depth > 0
5010 }
5011}
5012
5013impl<'a> Iterator for BufferChunks<'a> {
5014 type Item = Chunk<'a>;
5015
5016 fn next(&mut self) -> Option<Self::Item> {
5017 let mut next_capture_start = usize::MAX;
5018 let mut next_diagnostic_endpoint = usize::MAX;
5019
5020 if let Some(highlights) = self.highlights.as_mut() {
5021 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5022 if *parent_capture_end <= self.range.start {
5023 highlights.stack.pop();
5024 } else {
5025 break;
5026 }
5027 }
5028
5029 if highlights.next_capture.is_none() {
5030 highlights.next_capture = highlights.captures.next();
5031 }
5032
5033 while let Some(capture) = highlights.next_capture.as_ref() {
5034 if self.range.start < capture.node.start_byte() {
5035 next_capture_start = capture.node.start_byte();
5036 break;
5037 } else {
5038 let highlight_id =
5039 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5040 highlights
5041 .stack
5042 .push((capture.node.end_byte(), highlight_id));
5043 highlights.next_capture = highlights.captures.next();
5044 }
5045 }
5046 }
5047
5048 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5049 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5050 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5051 if endpoint.offset <= self.range.start {
5052 self.update_diagnostic_depths(endpoint);
5053 diagnostic_endpoints.next();
5054 self.underline = endpoint.underline;
5055 } else {
5056 next_diagnostic_endpoint = endpoint.offset;
5057 break;
5058 }
5059 }
5060 }
5061 self.diagnostic_endpoints = diagnostic_endpoints;
5062
5063 if let Some(ChunkBitmaps {
5064 text: chunk,
5065 chars: chars_map,
5066 tabs,
5067 }) = self.chunks.peek_with_bitmaps()
5068 {
5069 let chunk_start = self.range.start;
5070 let mut chunk_end = (self.chunks.offset() + chunk.len())
5071 .min(next_capture_start)
5072 .min(next_diagnostic_endpoint);
5073 let mut highlight_id = None;
5074 if let Some(highlights) = self.highlights.as_ref()
5075 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5076 {
5077 chunk_end = chunk_end.min(*parent_capture_end);
5078 highlight_id = Some(*parent_highlight_id);
5079 }
5080 let bit_start = chunk_start - self.chunks.offset();
5081 let bit_end = chunk_end - self.chunks.offset();
5082
5083 let slice = &chunk[bit_start..bit_end];
5084
5085 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5086 let tabs = (tabs >> bit_start) & mask;
5087 let chars = (chars_map >> bit_start) & mask;
5088
5089 self.range.start = chunk_end;
5090 if self.range.start == self.chunks.offset() + chunk.len() {
5091 self.chunks.next().unwrap();
5092 }
5093
5094 Some(Chunk {
5095 text: slice,
5096 syntax_highlight_id: highlight_id,
5097 underline: self.underline,
5098 diagnostic_severity: self.current_diagnostic_severity(),
5099 is_unnecessary: self.current_code_is_unnecessary(),
5100 tabs,
5101 chars,
5102 ..Chunk::default()
5103 })
5104 } else {
5105 None
5106 }
5107 }
5108}
5109
5110impl operation_queue::Operation for Operation {
5111 fn lamport_timestamp(&self) -> clock::Lamport {
5112 match self {
5113 Operation::Buffer(_) => {
5114 unreachable!("buffer operations should never be deferred at this layer")
5115 }
5116 Operation::UpdateDiagnostics {
5117 lamport_timestamp, ..
5118 }
5119 | Operation::UpdateSelections {
5120 lamport_timestamp, ..
5121 }
5122 | Operation::UpdateCompletionTriggers {
5123 lamport_timestamp, ..
5124 }
5125 | Operation::UpdateLineEnding {
5126 lamport_timestamp, ..
5127 } => *lamport_timestamp,
5128 }
5129 }
5130}
5131
5132impl Default for Diagnostic {
5133 fn default() -> Self {
5134 Self {
5135 source: Default::default(),
5136 source_kind: DiagnosticSourceKind::Other,
5137 code: None,
5138 code_description: None,
5139 severity: DiagnosticSeverity::ERROR,
5140 message: Default::default(),
5141 markdown: None,
5142 group_id: 0,
5143 is_primary: false,
5144 is_disk_based: false,
5145 is_unnecessary: false,
5146 underline: true,
5147 data: None,
5148 }
5149 }
5150}
5151
5152impl IndentSize {
5153 /// Returns an [`IndentSize`] representing the given spaces.
5154 pub fn spaces(len: u32) -> Self {
5155 Self {
5156 len,
5157 kind: IndentKind::Space,
5158 }
5159 }
5160
5161 /// Returns an [`IndentSize`] representing a tab.
5162 pub fn tab() -> Self {
5163 Self {
5164 len: 1,
5165 kind: IndentKind::Tab,
5166 }
5167 }
5168
5169 /// An iterator over the characters represented by this [`IndentSize`].
5170 pub fn chars(&self) -> impl Iterator<Item = char> {
5171 iter::repeat(self.char()).take(self.len as usize)
5172 }
5173
5174 /// The character representation of this [`IndentSize`].
5175 pub fn char(&self) -> char {
5176 match self.kind {
5177 IndentKind::Space => ' ',
5178 IndentKind::Tab => '\t',
5179 }
5180 }
5181
5182 /// Consumes the current [`IndentSize`] and returns a new one that has
5183 /// been shrunk or enlarged by the given size along the given direction.
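    ///
    /// For example (a rough sketch, not compiled as a doctest), shrinking four spaces by two
    /// spaces yields two spaces:
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(indent.len, 2);
    /// ```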
5184 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5185 match direction {
5186 Ordering::Less => {
5187 if self.kind == size.kind && self.len >= size.len {
5188 self.len -= size.len;
5189 }
5190 }
5191 Ordering::Equal => {}
5192 Ordering::Greater => {
5193 if self.len == 0 {
5194 self = size;
5195 } else if self.kind == size.kind {
5196 self.len += size.len;
5197 }
5198 }
5199 }
5200 self
5201 }
5202
5203 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5204 match self.kind {
5205 IndentKind::Space => self.len as usize,
5206 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5207 }
5208 }
5209}
5210
5211#[cfg(any(test, feature = "test-support"))]
5212pub struct TestFile {
5213 pub path: Arc<RelPath>,
5214 pub root_name: String,
5215 pub local_root: Option<PathBuf>,
5216}
5217
5218#[cfg(any(test, feature = "test-support"))]
5219impl File for TestFile {
5220 fn path(&self) -> &Arc<RelPath> {
5221 &self.path
5222 }
5223
5224 fn full_path(&self, _: &gpui::App) -> PathBuf {
5225 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5226 }
5227
5228 fn as_local(&self) -> Option<&dyn LocalFile> {
5229 if self.local_root.is_some() {
5230 Some(self)
5231 } else {
5232 None
5233 }
5234 }
5235
5236 fn disk_state(&self) -> DiskState {
5237 unimplemented!()
5238 }
5239
5240 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5241 self.path().file_name().unwrap_or(self.root_name.as_ref())
5242 }
5243
5244 fn worktree_id(&self, _: &App) -> WorktreeId {
5245 WorktreeId::from_usize(0)
5246 }
5247
5248 fn to_proto(&self, _: &App) -> rpc::proto::File {
5249 unimplemented!()
5250 }
5251
5252 fn is_private(&self) -> bool {
5253 false
5254 }
5255
5256 fn path_style(&self, _cx: &App) -> PathStyle {
5257 PathStyle::local()
5258 }
5259}
5260
5261#[cfg(any(test, feature = "test-support"))]
5262impl LocalFile for TestFile {
5263 fn abs_path(&self, _cx: &App) -> PathBuf {
5264 PathBuf::from(self.local_root.as_ref().unwrap())
5265 .join(&self.root_name)
5266 .join(self.path.as_std_path())
5267 }
5268
5269 fn load(
5270 &self,
5271 _cx: &App,
5272 _encoding: EncodingWrapper,
5273 _force: bool,
5274 _detect_utf16: bool,
5275 _buffer_encoding: Option<Arc<std::sync::Mutex<&'static Encoding>>>,
5276 ) -> Task<Result<String>> {
5277 unimplemented!()
5278 }
5279
5280 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5281 unimplemented!()
5282 }
5283}
5284
5285pub(crate) fn contiguous_ranges(
5286 values: impl Iterator<Item = u32>,
5287 max_len: usize,
5288) -> impl Iterator<Item = Range<u32>> {
5289 let mut values = values;
5290 let mut current_range: Option<Range<u32>> = None;
5291 std::iter::from_fn(move || {
5292 loop {
5293 if let Some(value) = values.next() {
5294 if let Some(range) = &mut current_range
5295 && value == range.end
5296 && range.len() < max_len
5297 {
5298 range.end += 1;
5299 continue;
5300 }
5301
5302 let prev_range = current_range.clone();
5303 current_range = Some(value..(value + 1));
5304 if prev_range.is_some() {
5305 return prev_range;
5306 }
5307 } else {
5308 return current_range.take();
5309 }
5310 }
5311 })
5312}
5313
5314#[derive(Default, Debug)]
5315pub struct CharClassifier {
5316 scope: Option<LanguageScope>,
5317 scope_context: Option<CharScopeContext>,
5318 ignore_punctuation: bool,
5319}
5320
5321impl CharClassifier {
5322 pub fn new(scope: Option<LanguageScope>) -> Self {
5323 Self {
5324 scope,
5325 scope_context: None,
5326 ignore_punctuation: false,
5327 }
5328 }
5329
5330 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5331 Self {
5332 scope_context,
5333 ..self
5334 }
5335 }
5336
5337 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5338 Self {
5339 ignore_punctuation,
5340 ..self
5341 }
5342 }
5343
5344 pub fn is_whitespace(&self, c: char) -> bool {
5345 self.kind(c) == CharKind::Whitespace
5346 }
5347
5348 pub fn is_word(&self, c: char) -> bool {
5349 self.kind(c) == CharKind::Word
5350 }
5351
5352 pub fn is_punctuation(&self, c: char) -> bool {
5353 self.kind(c) == CharKind::Punctuation
5354 }
5355
5356 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5357 if c.is_alphanumeric() || c == '_' {
5358 return CharKind::Word;
5359 }
5360
5361 if let Some(scope) = &self.scope {
5362 let characters = match self.scope_context {
5363 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5364 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5365 None => scope.word_characters(),
5366 };
5367 if let Some(characters) = characters
5368 && characters.contains(&c)
5369 {
5370 return CharKind::Word;
5371 }
5372 }
5373
5374 if c.is_whitespace() {
5375 return CharKind::Whitespace;
5376 }
5377
5378 if ignore_punctuation {
5379 CharKind::Word
5380 } else {
5381 CharKind::Punctuation
5382 }
5383 }
5384
5385 pub fn kind(&self, c: char) -> CharKind {
5386 self.kind_with(c, self.ignore_punctuation)
5387 }
5388}
5389
5390/// Find all of the ranges of whitespace that occur at the ends of lines
5391/// in the given rope.
5392///
5393/// This could also be done with a regex search, but this implementation
5394/// avoids copying text.
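///
/// A rough illustration (not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // Yields the byte ranges of the trailing spaces on the first line
/// // and of the trailing tabs on the second.
/// ```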
5395pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5396 let mut ranges = Vec::new();
5397
5398 let mut offset = 0;
5399 let mut prev_chunk_trailing_whitespace_range = 0..0;
5400 for chunk in rope.chunks() {
5401 let mut prev_line_trailing_whitespace_range = 0..0;
5402 for (i, line) in chunk.split('\n').enumerate() {
5403 let line_end_offset = offset + line.len();
5404 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5405 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5406
5407 if i == 0 && trimmed_line_len == 0 {
5408 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5409 }
5410 if !prev_line_trailing_whitespace_range.is_empty() {
5411 ranges.push(prev_line_trailing_whitespace_range);
5412 }
5413
5414 offset = line_end_offset + 1;
5415 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5416 }
5417
5418 offset -= 1;
5419 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5420 }
5421
5422 if !prev_chunk_trailing_whitespace_range.is_empty() {
5423 ranges.push(prev_chunk_trailing_whitespace_range);
5424 }
5425
5426 ranges
5427}