1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::{MTime, encodings::EncodingWrapper};
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: Arc<std::sync::Mutex<&'static Encoding>>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
216 pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
    /// The human-readable message, in Markdown format, if available.
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
244 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
    /// The buffer needs to be reloaded.
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
348 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
349 /// only available in some states, such as modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
374 /// Return whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376}
377
378/// The file's storage status - whether it's stored (`Present`), and if so when it was last
379/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
380/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
381/// indicator for new files.
382#[derive(Copy, Clone, Debug, PartialEq)]
383pub enum DiskState {
384 /// File created in Zed that has not been saved.
385 New,
386 /// File present on the filesystem.
387 Present { mtime: MTime },
388 /// Deleted file that was previously present.
389 Deleted,
390}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
410
411/// The file associated with a buffer, in the case where the file is on the local disk.
412pub trait LocalFile: File {
    /// Returns the absolute path of this file.
414 fn abs_path(&self, cx: &App) -> PathBuf;
415
416 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
417 fn load(&self, cx: &App, encoding: EncodingWrapper, detect_utf16: bool)
418 -> Task<Result<String>>;
419
420 /// Loads the file's contents from disk.
421 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
422}
423
424/// The auto-indent behavior associated with an editing operation.
425/// For some editing operations, each affected line of text has its
426/// indentation recomputed. For other operations, the entire block
427/// of edited text is adjusted uniformly.
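///
/// A minimal, illustrative sketch of passing an autoindent mode to an edit
/// (not compiled; assumes `buffer` is a `Buffer` being updated inside a
/// `Context<Buffer>`, and that `offset` and `copied_text` are defined by the
/// caller):
///
/// ```ignore
/// buffer.edit(
///     [(offset..offset, copied_text)],
///     Some(AutoindentMode::Block {
///         // One entry per insertion; `Some(4)` means the text was originally
///         // copied from indent column 4.
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```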
428#[derive(Clone, Debug)]
429pub enum AutoindentMode {
430 /// Indent each line of inserted text.
431 EachLine,
432 /// Apply the same indentation adjustment to all of the lines
433 /// in a given insertion.
434 Block {
435 /// The original indentation column of the first line of each
436 /// insertion, if it has been copied.
437 ///
438 /// Knowing this makes it possible to preserve the relative indentation
439 /// of every line in the insertion from when it was copied.
440 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, every other line of
        /// the insertion has its indentation shifted by the same delta, `b - a`.
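        ///
        /// For example, with illustrative numbers: if a block was copied from
        /// indent column 4 (`a = 4`) and its first line is auto-indented to
        /// column 8 (`b = 8`), every other line of the insertion is shifted
        /// right by `b - a = 4` columns, preserving the block's internal shape.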
444 original_indent_columns: Vec<Option<u32>>,
445 },
446}
447
448#[derive(Clone)]
449struct AutoindentRequest {
450 before_edit: BufferSnapshot,
451 entries: Vec<AutoindentRequestEntry>,
452 is_block_mode: bool,
453 ignore_empty_lines: bool,
454}
455
456#[derive(Debug, Clone)]
457struct AutoindentRequestEntry {
458 /// A range of the buffer whose indentation should be adjusted.
459 range: Range<Anchor>,
460 /// Whether or not these lines should be considered brand new, for the
461 /// purpose of auto-indent. When text is not new, its indentation will
462 /// only be adjusted if the suggested indentation level has *changed*
463 /// since the edit was made.
464 first_line_is_new: bool,
465 indent_size: IndentSize,
466 original_indent_column: Option<u32>,
467}
468
469#[derive(Debug)]
470struct IndentSuggestion {
471 basis_row: u32,
472 delta: Ordering,
473 within_error: bool,
474}
475
476struct BufferChunkHighlights<'a> {
477 captures: SyntaxMapCaptures<'a>,
478 next_capture: Option<SyntaxMapCapture<'a>>,
479 stack: Vec<(usize, HighlightId)>,
480 highlight_maps: Vec<HighlightMap>,
481}
482
483/// An iterator that yields chunks of a buffer's text, along with their
484/// syntax highlights and diagnostic status.
485pub struct BufferChunks<'a> {
486 buffer_snapshot: Option<&'a BufferSnapshot>,
487 range: Range<usize>,
488 chunks: text::Chunks<'a>,
489 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
490 error_depth: usize,
491 warning_depth: usize,
492 information_depth: usize,
493 hint_depth: usize,
494 unnecessary_depth: usize,
495 underline: bool,
496 highlights: Option<BufferChunkHighlights<'a>>,
497}
498
499/// A chunk of a buffer's text, along with its syntax highlight and
500/// diagnostic status.
501#[derive(Clone, Debug, Default)]
502pub struct Chunk<'a> {
503 /// The text of the chunk.
504 pub text: &'a str,
505 /// The syntax highlighting style of the chunk.
506 pub syntax_highlight_id: Option<HighlightId>,
507 /// The highlight style that has been applied to this chunk in
508 /// the editor.
509 pub highlight_style: Option<HighlightStyle>,
510 /// The severity of diagnostic associated with this chunk, if any.
511 pub diagnostic_severity: Option<DiagnosticSeverity>,
512 /// A bitset of which characters are tabs in this string.
513 pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
515 pub chars: u128,
516 /// Whether this chunk of text is marked as unnecessary.
517 pub is_unnecessary: bool,
518 /// Whether this chunk of text was originally a tab character.
519 pub is_tab: bool,
520 /// Whether this chunk of text was originally an inlay.
521 pub is_inlay: bool,
522 /// Whether to underline the corresponding text range in the editor.
523 pub underline: bool,
524}
525
526/// A set of edits to a given version of a buffer, computed asynchronously.
527#[derive(Debug)]
528pub struct Diff {
529 pub base_version: clock::Global,
530 pub line_ending: LineEnding,
531 pub edits: Vec<(Range<usize>, Arc<str>)>,
532}
533
534#[derive(Debug, Clone, Copy)]
535pub(crate) struct DiagnosticEndpoint {
536 offset: usize,
537 is_start: bool,
538 underline: bool,
539 severity: DiagnosticSeverity,
540 is_unnecessary: bool,
541}
542
543/// A class of characters, used for characterizing a run of text.
544#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
545pub enum CharKind {
546 /// Whitespace.
547 Whitespace,
548 /// Punctuation.
549 Punctuation,
550 /// Word.
551 Word,
552}
553
554/// Context for character classification within a specific scope.
555#[derive(Copy, Clone, Eq, PartialEq, Debug)]
556pub enum CharScopeContext {
557 /// Character classification for completion queries.
558 ///
559 /// This context treats certain characters as word constituents that would
560 /// normally be considered punctuation, such as '-' in Tailwind classes
561 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
562 Completion,
563 /// Character classification for linked edits.
564 ///
565 /// This context handles characters that should be treated as part of
566 /// identifiers during linked editing operations, such as '.' in JSX
567 /// component names like `<Animated.View>`.
568 LinkedEdit,
569}
570
/// A runnable is a set of data about a buffer region that can be resolved into a task.
572pub struct Runnable {
573 pub tags: SmallVec<[RunnableTag; 1]>,
574 pub language: Arc<Language>,
575 pub buffer: BufferId,
576}
577
578#[derive(Default, Clone, Debug)]
579pub struct HighlightedText {
580 pub text: SharedString,
581 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
582}
583
584#[derive(Default, Debug)]
585struct HighlightedTextBuilder {
586 pub text: String,
587 highlights: Vec<(Range<usize>, HighlightStyle)>,
588}
589
590impl HighlightedText {
591 pub fn from_buffer_range<T: ToOffset>(
592 range: Range<T>,
593 snapshot: &text::BufferSnapshot,
594 syntax_snapshot: &SyntaxSnapshot,
595 override_style: Option<HighlightStyle>,
596 syntax_theme: &SyntaxTheme,
597 ) -> Self {
598 let mut highlighted_text = HighlightedTextBuilder::default();
599 highlighted_text.add_text_from_buffer_range(
600 range,
601 snapshot,
602 syntax_snapshot,
603 override_style,
604 syntax_theme,
605 );
606 highlighted_text.build()
607 }
608
609 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
610 gpui::StyledText::new(self.text.clone())
611 .with_default_highlights(default_style, self.highlights.iter().cloned())
612 }
613
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within that whitespace, and a boolean indicating
    /// whether more lines follow.
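    ///
    /// An illustrative example: for the text `"    let x = 1;\nlet y = 2;"`
    /// with no highlight starting inside the leading whitespace, the preview
    /// text is `"let x = 1;"` and the returned boolean is `true`.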
616 pub fn first_line_preview(self) -> (Self, bool) {
617 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
618 let first_line = &self.text[..newline_ix];
619
620 // Trim leading whitespace, unless an edit starts prior to it.
621 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
622 if let Some((first_highlight_range, _)) = self.highlights.first() {
623 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
624 }
625
626 let preview_text = &first_line[preview_start_ix..];
627 let preview_highlights = self
628 .highlights
629 .into_iter()
630 .skip_while(|(range, _)| range.end <= preview_start_ix)
631 .take_while(|(range, _)| range.start < newline_ix)
632 .filter_map(|(mut range, highlight)| {
633 range.start = range.start.saturating_sub(preview_start_ix);
634 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
635 if range.is_empty() {
636 None
637 } else {
638 Some((range, highlight))
639 }
640 });
641
642 let preview = Self {
643 text: SharedString::new(preview_text),
644 highlights: preview_highlights.collect(),
645 };
646
647 (preview, self.text.len() > newline_ix)
648 }
649}
650
651impl HighlightedTextBuilder {
652 pub fn build(self) -> HighlightedText {
653 HighlightedText {
654 text: self.text.into(),
655 highlights: self.highlights,
656 }
657 }
658
659 pub fn add_text_from_buffer_range<T: ToOffset>(
660 &mut self,
661 range: Range<T>,
662 snapshot: &text::BufferSnapshot,
663 syntax_snapshot: &SyntaxSnapshot,
664 override_style: Option<HighlightStyle>,
665 syntax_theme: &SyntaxTheme,
666 ) {
667 let range = range.to_offset(snapshot);
668 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
669 let start = self.text.len();
670 self.text.push_str(chunk.text);
671 let end = self.text.len();
672
673 if let Some(highlight_style) = chunk
674 .syntax_highlight_id
675 .and_then(|id| id.style(syntax_theme))
676 {
677 let highlight_style = override_style.map_or(highlight_style, |override_style| {
678 highlight_style.highlight(override_style)
679 });
680 self.highlights.push((start..end, highlight_style));
681 } else if let Some(override_style) = override_style {
682 self.highlights.push((start..end, override_style));
683 }
684 }
685 }
686
687 fn highlighted_chunks<'a>(
688 range: Range<usize>,
689 snapshot: &'a text::BufferSnapshot,
690 syntax_snapshot: &'a SyntaxSnapshot,
691 ) -> BufferChunks<'a> {
692 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
693 grammar
694 .highlights_config
695 .as_ref()
696 .map(|config| &config.query)
697 });
698
699 let highlight_maps = captures
700 .grammars()
701 .iter()
702 .map(|grammar| grammar.highlight_map())
703 .collect();
704
705 BufferChunks::new(
706 snapshot.as_rope(),
707 range,
708 Some((captures, highlight_maps)),
709 false,
710 None,
711 )
712 }
713}
714
715#[derive(Clone)]
716pub struct EditPreview {
717 old_snapshot: text::BufferSnapshot,
718 applied_edits_snapshot: text::BufferSnapshot,
719 syntax_snapshot: SyntaxSnapshot,
720}
721
722impl EditPreview {
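    /// Renders a preview of `edits` as highlighted text, marking inserted text
    /// with the theme's "created" background and, when `include_deletions` is
    /// true, the replaced text with the "deleted" background.
    ///
    /// A minimal usage sketch (illustrative only, not compiled; assumes
    /// `buffer` is an `Entity<Buffer>`, `edits` is the same
    /// `Arc<[(Range<Anchor>, String)]>` the preview was built from, and
    /// `text_style` is a `TextStyle`):
    ///
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let highlighted =
    ///     preview.highlight_edits(&buffer.read(cx).snapshot(), &edits, true, cx);
    /// let styled = highlighted.to_styled_text(&text_style);
    /// ```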
723 pub fn highlight_edits(
724 &self,
725 current_snapshot: &BufferSnapshot,
726 edits: &[(Range<Anchor>, String)],
727 include_deletions: bool,
728 cx: &App,
729 ) -> HighlightedText {
730 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
731 return HighlightedText::default();
732 };
733
734 let mut highlighted_text = HighlightedTextBuilder::default();
735
736 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
737
738 let insertion_highlight_style = HighlightStyle {
739 background_color: Some(cx.theme().status().created_background),
740 ..Default::default()
741 };
742 let deletion_highlight_style = HighlightStyle {
743 background_color: Some(cx.theme().status().deleted_background),
744 ..Default::default()
745 };
746 let syntax_theme = cx.theme().syntax();
747
748 for (range, edit_text) in edits {
749 let edit_new_end_in_preview_snapshot = range
750 .end
751 .bias_right(&self.old_snapshot)
752 .to_offset(&self.applied_edits_snapshot);
753 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
754
755 let unchanged_range_in_preview_snapshot =
756 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
757 if !unchanged_range_in_preview_snapshot.is_empty() {
758 highlighted_text.add_text_from_buffer_range(
759 unchanged_range_in_preview_snapshot,
760 &self.applied_edits_snapshot,
761 &self.syntax_snapshot,
762 None,
763 syntax_theme,
764 );
765 }
766
767 let range_in_current_snapshot = range.to_offset(current_snapshot);
768 if include_deletions && !range_in_current_snapshot.is_empty() {
769 highlighted_text.add_text_from_buffer_range(
770 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
773 Some(deletion_highlight_style),
774 syntax_theme,
775 );
776 }
777
778 if !edit_text.is_empty() {
779 highlighted_text.add_text_from_buffer_range(
780 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
781 &self.applied_edits_snapshot,
782 &self.syntax_snapshot,
783 Some(insertion_highlight_style),
784 syntax_theme,
785 );
786 }
787
788 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
789 }
790
791 highlighted_text.add_text_from_buffer_range(
792 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
793 &self.applied_edits_snapshot,
794 &self.syntax_snapshot,
795 None,
796 syntax_theme,
797 );
798
799 highlighted_text.build()
800 }
801
802 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
803 let (first, _) = edits.first()?;
804 let (last, _) = edits.last()?;
805
806 let start = first
807 .start
808 .bias_left(&self.old_snapshot)
809 .to_point(&self.applied_edits_snapshot);
810 let end = last
811 .end
812 .bias_right(&self.old_snapshot)
813 .to_point(&self.applied_edits_snapshot);
814
815 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
816 let range = Point::new(start.row, 0)
817 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
818
819 Some(range.to_offset(&self.applied_edits_snapshot))
820 }
821}
822
823#[derive(Clone, Debug, PartialEq, Eq)]
824pub struct BracketMatch {
825 pub open_range: Range<usize>,
826 pub close_range: Range<usize>,
827 pub newline_only: bool,
828}
829
830impl Buffer {
831 /// Create a new buffer with the given base text.
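    ///
    /// A minimal usage sketch (illustrative only, not compiled; assumes a gpui
    /// `App` context in which entities can be created):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```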
832 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
833 Self::build(
834 TextBuffer::new(
835 ReplicaId::LOCAL,
836 cx.entity_id().as_non_zero_u64().into(),
837 base_text.into(),
838 &cx.background_executor(),
839 ),
840 None,
841 Capability::ReadWrite,
842 )
843 }
844
845 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
846 pub fn local_normalized(
847 base_text_normalized: Rope,
848 line_ending: LineEnding,
849 cx: &Context<Self>,
850 ) -> Self {
851 Self::build(
852 TextBuffer::new_normalized(
853 ReplicaId::LOCAL,
854 cx.entity_id().as_non_zero_u64().into(),
855 line_ending,
856 base_text_normalized,
857 ),
858 None,
859 Capability::ReadWrite,
860 )
861 }
862
863 /// Create a new buffer that is a replica of a remote buffer.
864 pub fn remote(
865 remote_id: BufferId,
866 replica_id: ReplicaId,
867 capability: Capability,
868 base_text: impl Into<String>,
869 cx: &BackgroundExecutor,
870 ) -> Self {
871 Self::build(
872 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
873 None,
874 capability,
875 )
876 }
877
878 /// Create a new buffer that is a replica of a remote buffer, populating its
879 /// state from the given protobuf message.
880 pub fn from_proto(
881 replica_id: ReplicaId,
882 capability: Capability,
883 message: proto::BufferState,
884 file: Option<Arc<dyn File>>,
885 cx: &BackgroundExecutor,
886 ) -> Result<Self> {
887 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
888 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
889 let mut this = Self::build(buffer, file, capability);
890 this.text.set_line_ending(proto::deserialize_line_ending(
891 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
892 ));
893 this.saved_version = proto::deserialize_version(&message.saved_version);
894 this.saved_mtime = message.saved_mtime.map(|time| time.into());
895 Ok(this)
896 }
897
898 /// Serialize the buffer's state to a protobuf message.
899 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
900 proto::BufferState {
901 id: self.remote_id().into(),
902 file: self.file.as_ref().map(|f| f.to_proto(cx)),
903 base_text: self.base_text().to_string(),
904 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
905 saved_version: proto::serialize_version(&self.saved_version),
906 saved_mtime: self.saved_mtime.map(|time| time.into()),
907 }
908 }
909
910 /// Serialize as protobufs all of the changes to the buffer since the given version.
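    ///
    /// A hedged sketch of the intended round-trip (illustrative only, not
    /// compiled; `guest_version` and `guest_buffer` are hypothetical, and the
    /// deserialization step assumes the corresponding `proto` helper exists):
    ///
    /// ```ignore
    /// // Host: serialize every operation the guest has not yet observed.
    /// let ops = buffer.read(cx).serialize_ops(Some(guest_version), cx).await;
    /// // Guest: deserialize and apply them to its replica.
    /// let ops: Vec<Operation> = ops
    ///     .into_iter()
    ///     .map(proto::deserialize_operation)
    ///     .collect::<Result<_>>()?;
    /// guest_buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
    /// ```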
911 pub fn serialize_ops(
912 &self,
913 since: Option<clock::Global>,
914 cx: &App,
915 ) -> Task<Vec<proto::Operation>> {
916 let mut operations = Vec::new();
917 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
918
919 operations.extend(self.remote_selections.iter().map(|(_, set)| {
920 proto::serialize_operation(&Operation::UpdateSelections {
921 selections: set.selections.clone(),
922 lamport_timestamp: set.lamport_timestamp,
923 line_mode: set.line_mode,
924 cursor_shape: set.cursor_shape,
925 })
926 }));
927
928 for (server_id, diagnostics) in &self.diagnostics {
929 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
930 lamport_timestamp: self.diagnostics_timestamp,
931 server_id: *server_id,
932 diagnostics: diagnostics.iter().cloned().collect(),
933 }));
934 }
935
936 for (server_id, completions) in &self.completion_triggers_per_language_server {
937 operations.push(proto::serialize_operation(
938 &Operation::UpdateCompletionTriggers {
939 triggers: completions.iter().cloned().collect(),
940 lamport_timestamp: self.completion_triggers_timestamp,
941 server_id: *server_id,
942 },
943 ));
944 }
945
946 let text_operations = self.text.operations().clone();
947 cx.background_spawn(async move {
948 let since = since.unwrap_or_default();
949 operations.extend(
950 text_operations
951 .iter()
952 .filter(|(_, op)| !since.observed(op.timestamp()))
953 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
954 );
955 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
956 operations
957 })
958 }
959
960 /// Assign a language to the buffer, returning the buffer.
961 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
962 self.set_language(Some(language), cx);
963 self
964 }
965
966 /// Returns the [`Capability`] of this buffer.
967 pub fn capability(&self) -> Capability {
968 self.capability
969 }
970
971 /// Whether this buffer can only be read.
972 pub fn read_only(&self) -> bool {
973 self.capability == Capability::ReadOnly
974 }
975
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
977 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
978 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
979 let snapshot = buffer.snapshot();
980 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
981 Self {
982 saved_mtime,
983 saved_version: buffer.version(),
984 preview_version: buffer.version(),
985 reload_task: None,
986 transaction_depth: 0,
987 was_dirty_before_starting_transaction: None,
988 has_unsaved_edits: Cell::new((buffer.version(), false)),
989 text: buffer,
990 branch_state: None,
991 file,
992 capability,
993 syntax_map,
994 reparse: None,
995 non_text_state_update_count: 0,
996 sync_parse_timeout: Duration::from_millis(1),
997 parse_status: watch::channel(ParseStatus::Idle),
998 autoindent_requests: Default::default(),
999 wait_for_autoindent_txs: Default::default(),
1000 pending_autoindent: Default::default(),
1001 language: None,
1002 remote_selections: Default::default(),
1003 diagnostics: Default::default(),
1004 diagnostics_timestamp: Lamport::MIN,
1005 completion_triggers: Default::default(),
1006 completion_triggers_per_language_server: Default::default(),
1007 completion_triggers_timestamp: Lamport::MIN,
1008 deferred_ops: OperationQueue::new(),
1009 has_conflict: false,
1010 change_bits: Default::default(),
1011 _subscriptions: Vec::new(),
1012 encoding: Arc::new(std::sync::Mutex::new(encoding_rs::UTF_8)),
1013 }
1014 }
1015
1016 pub fn build_snapshot(
1017 text: Rope,
1018 language: Option<Arc<Language>>,
1019 language_registry: Option<Arc<LanguageRegistry>>,
1020 cx: &mut App,
1021 ) -> impl Future<Output = BufferSnapshot> + use<> {
1022 let entity_id = cx.reserve_entity::<Self>().entity_id();
1023 let buffer_id = entity_id.as_non_zero_u64().into();
1024 async move {
1025 let text =
1026 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1027 .snapshot();
1028 let mut syntax = SyntaxMap::new(&text).snapshot();
1029 if let Some(language) = language.clone() {
1030 let language_registry = language_registry.clone();
1031 syntax.reparse(&text, language_registry, language);
1032 }
1033 BufferSnapshot {
1034 text,
1035 syntax,
1036 file: None,
1037 diagnostics: Default::default(),
1038 remote_selections: Default::default(),
1039 language,
1040 non_text_state_update_count: 0,
1041 }
1042 }
1043 }
1044
1045 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1046 let entity_id = cx.reserve_entity::<Self>().entity_id();
1047 let buffer_id = entity_id.as_non_zero_u64().into();
1048 let text = TextBuffer::new_normalized(
1049 ReplicaId::LOCAL,
1050 buffer_id,
1051 Default::default(),
1052 Rope::new(),
1053 )
1054 .snapshot();
1055 let syntax = SyntaxMap::new(&text).snapshot();
1056 BufferSnapshot {
1057 text,
1058 syntax,
1059 file: None,
1060 diagnostics: Default::default(),
1061 remote_selections: Default::default(),
1062 language: None,
1063 non_text_state_update_count: 0,
1064 }
1065 }
1066
1067 #[cfg(any(test, feature = "test-support"))]
1068 pub fn build_snapshot_sync(
1069 text: Rope,
1070 language: Option<Arc<Language>>,
1071 language_registry: Option<Arc<LanguageRegistry>>,
1072 cx: &mut App,
1073 ) -> BufferSnapshot {
1074 let entity_id = cx.reserve_entity::<Self>().entity_id();
1075 let buffer_id = entity_id.as_non_zero_u64().into();
1076 let text =
1077 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1078 .snapshot();
1079 let mut syntax = SyntaxMap::new(&text).snapshot();
1080 if let Some(language) = language.clone() {
1081 syntax.reparse(&text, language_registry, language);
1082 }
1083 BufferSnapshot {
1084 text,
1085 syntax,
1086 file: None,
1087 diagnostics: Default::default(),
1088 remote_selections: Default::default(),
1089 language,
1090 non_text_state_update_count: 0,
1091 }
1092 }
1093
1094 /// Retrieve a snapshot of the buffer's current state. This is computationally
1095 /// cheap, and allows reading from the buffer on a background thread.
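    ///
    /// A minimal sketch (illustrative only, not compiled): the snapshot can be
    /// moved to a background task while the buffer entity stays on the main
    /// thread.
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only queries go through the snapshot, not the buffer entity.
    ///     let _first_line_len = snapshot.text.line_len(0);
    /// })
    /// .detach();
    /// ```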
1096 pub fn snapshot(&self) -> BufferSnapshot {
1097 let text = self.text.snapshot();
1098 let mut syntax_map = self.syntax_map.lock();
1099 syntax_map.interpolate(&text);
1100 let syntax = syntax_map.snapshot();
1101
1102 BufferSnapshot {
1103 text,
1104 syntax,
1105 file: self.file.clone(),
1106 remote_selections: self.remote_selections.clone(),
1107 diagnostics: self.diagnostics.clone(),
1108 language: self.language.clone(),
1109 non_text_state_update_count: self.non_text_state_update_count,
1110 }
1111 }
1112
1113 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1114 let this = cx.entity();
1115 cx.new(|cx| {
1116 let mut branch = Self {
1117 branch_state: Some(BufferBranchState {
1118 base_buffer: this.clone(),
1119 merged_operations: Default::default(),
1120 }),
1121 language: self.language.clone(),
1122 has_conflict: self.has_conflict,
1123 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1124 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1125 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1126 };
1127 if let Some(language_registry) = self.language_registry() {
1128 branch.set_language_registry(language_registry);
1129 }
1130
1131 // Reparse the branch buffer so that we get syntax highlighting immediately.
1132 branch.reparse(cx);
1133
1134 branch
1135 })
1136 }
1137
1138 pub fn preview_edits(
1139 &self,
1140 edits: Arc<[(Range<Anchor>, String)]>,
1141 cx: &App,
1142 ) -> Task<EditPreview> {
1143 let registry = self.language_registry();
1144 let language = self.language().cloned();
1145 let old_snapshot = self.text.snapshot();
1146 let mut branch_buffer = self.text.branch();
1147 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1148 let executor = cx.background_executor().clone();
1149 cx.background_spawn(async move {
1150 if !edits.is_empty() {
1151 if let Some(language) = language.clone() {
1152 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1153 }
1154
1155 branch_buffer.edit(edits.iter().cloned(), &executor);
1156 let snapshot = branch_buffer.snapshot();
1157 syntax_snapshot.interpolate(&snapshot);
1158
1159 if let Some(language) = language {
1160 syntax_snapshot.reparse(&snapshot, registry, language);
1161 }
1162 }
1163 EditPreview {
1164 old_snapshot,
1165 applied_edits_snapshot: branch_buffer.snapshot(),
1166 syntax_snapshot,
1167 }
1168 })
1169 }
1170
1171 /// Applies all of the changes in this buffer that intersect any of the
1172 /// given `ranges` to its base buffer.
1173 ///
1174 /// If `ranges` is empty, then all changes will be applied. This buffer must
1175 /// be a branch buffer to call this method.
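    ///
    /// A minimal sketch (illustrative only, not compiled; assumes `buffer` is
    /// an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prelude\n")], None, cx);
    ///     // Apply every branch edit back onto the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```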
1176 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1177 let Some(base_buffer) = self.base_buffer() else {
1178 debug_panic!("not a branch buffer");
1179 return;
1180 };
1181
1182 let mut ranges = if ranges.is_empty() {
1183 &[0..usize::MAX]
1184 } else {
1185 ranges.as_slice()
1186 }
1187 .iter()
1188 .peekable();
1189
1190 let mut edits = Vec::new();
1191 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1192 let mut is_included = false;
1193 while let Some(range) = ranges.peek() {
1194 if range.end < edit.new.start {
1195 ranges.next().unwrap();
1196 } else {
1197 if range.start <= edit.new.end {
1198 is_included = true;
1199 }
1200 break;
1201 }
1202 }
1203
1204 if is_included {
1205 edits.push((
1206 edit.old.clone(),
1207 self.text_for_range(edit.new.clone()).collect::<String>(),
1208 ));
1209 }
1210 }
1211
1212 let operation = base_buffer.update(cx, |base_buffer, cx| {
1213 // cx.emit(BufferEvent::DiffBaseChanged);
1214 base_buffer.edit(edits, None, cx)
1215 });
1216
1217 if let Some(operation) = operation
1218 && let Some(BufferBranchState {
1219 merged_operations, ..
1220 }) = &mut self.branch_state
1221 {
1222 merged_operations.push(operation);
1223 }
1224 }
1225
1226 fn on_base_buffer_event(
1227 &mut self,
1228 _: Entity<Buffer>,
1229 event: &BufferEvent,
1230 cx: &mut Context<Self>,
1231 ) {
1232 let BufferEvent::Operation { operation, .. } = event else {
1233 return;
1234 };
1235 let Some(BufferBranchState {
1236 merged_operations, ..
1237 }) = &mut self.branch_state
1238 else {
1239 return;
1240 };
1241
1242 let mut operation_to_undo = None;
1243 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1244 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1245 {
1246 merged_operations.remove(ix);
1247 operation_to_undo = Some(operation.timestamp);
1248 }
1249
1250 self.apply_ops([operation.clone()], cx);
1251
1252 if let Some(timestamp) = operation_to_undo {
1253 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1254 self.undo_operations(counts, cx);
1255 }
1256 }
1257
1258 #[cfg(test)]
1259 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1260 &self.text
1261 }
1262
1263 /// Retrieve a snapshot of the buffer's raw text, without any
1264 /// language-related state like the syntax tree or diagnostics.
1265 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1266 self.text.snapshot()
1267 }
1268
1269 /// The file associated with the buffer, if any.
1270 pub fn file(&self) -> Option<&Arc<dyn File>> {
1271 self.file.as_ref()
1272 }
1273
1274 /// The version of the buffer that was last saved or reloaded from disk.
1275 pub fn saved_version(&self) -> &clock::Global {
1276 &self.saved_version
1277 }
1278
1279 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1280 pub fn saved_mtime(&self) -> Option<MTime> {
1281 self.saved_mtime
1282 }
1283
1284 /// Assign a language to the buffer.
1285 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1286 self.non_text_state_update_count += 1;
1287 self.syntax_map.lock().clear(&self.text);
1288 self.language = language;
1289 self.was_changed();
1290 self.reparse(cx);
1291 cx.emit(BufferEvent::LanguageChanged);
1292 }
1293
1294 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1295 /// other languages if parts of the buffer are written in different languages.
1296 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1297 self.syntax_map
1298 .lock()
1299 .set_language_registry(language_registry);
1300 }
1301
1302 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1303 self.syntax_map.lock().language_registry()
1304 }
1305
1306 /// Assign the line ending type to the buffer.
1307 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1308 self.text.set_line_ending(line_ending);
1309
1310 let lamport_timestamp = self.text.lamport_clock.tick();
1311 self.send_operation(
1312 Operation::UpdateLineEnding {
1313 line_ending,
1314 lamport_timestamp,
1315 },
1316 true,
1317 cx,
1318 );
1319 }
1320
1321 /// Assign the buffer a new [`Capability`].
1322 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1323 if self.capability != capability {
1324 self.capability = capability;
1325 cx.emit(BufferEvent::CapabilityChanged)
1326 }
1327 }
1328
1329 /// This method is called to signal that the buffer has been saved.
1330 pub fn did_save(
1331 &mut self,
1332 version: clock::Global,
1333 mtime: Option<MTime>,
1334 cx: &mut Context<Self>,
1335 ) {
1336 self.saved_version = version.clone();
1337 self.has_unsaved_edits.set((version, false));
1338 self.has_conflict = false;
1339 self.saved_mtime = mtime;
1340 self.was_changed();
1341 cx.emit(BufferEvent::Saved);
1342 cx.notify();
1343 }
1344
1345 /// Reloads the contents of the buffer from disk.
1346 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1347 let (tx, rx) = futures::channel::oneshot::channel();
1348 let encoding = EncodingWrapper::new(*(self.encoding.lock().unwrap()));
1349
1350 let prev_version = self.text.version();
1351 self.reload_task = Some(cx.spawn(async move |this, cx| {
1352 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1353 let file = this.file.as_ref()?.as_local()?;
1354 Some((file.disk_state().mtime(), {
1355 file.load(cx, encoding, false)
1356 }))
1357 })?
1358 else {
1359 return Ok(());
1360 };
1361
1362 let new_text = new_text.await?;
1363 let diff = this
1364 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1365 .await;
1366 this.update(cx, |this, cx| {
1367 if this.version() == diff.base_version {
1368 this.finalize_last_transaction();
1369 this.apply_diff(diff, cx);
1370 tx.send(this.finalize_last_transaction().cloned()).ok();
1371 this.has_conflict = false;
1372 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1373 } else {
1374 if !diff.edits.is_empty()
1375 || this
1376 .edits_since::<usize>(&diff.base_version)
1377 .next()
1378 .is_some()
1379 {
1380 this.has_conflict = true;
1381 }
1382
1383 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1384 }
1385
1386 this.reload_task.take();
1387 })
1388 }));
1389 rx
1390 }
1391
1392 /// This method is called to signal that the buffer has been reloaded.
1393 pub fn did_reload(
1394 &mut self,
1395 version: clock::Global,
1396 line_ending: LineEnding,
1397 mtime: Option<MTime>,
1398 cx: &mut Context<Self>,
1399 ) {
1400 self.saved_version = version;
1401 self.has_unsaved_edits
1402 .set((self.saved_version.clone(), false));
1403 self.text.set_line_ending(line_ending);
1404 self.saved_mtime = mtime;
1405 cx.emit(BufferEvent::Reloaded);
1406 cx.notify();
1407 }
1408
1409 /// Updates the [`File`] backing this buffer. This should be called when
1410 /// the file has changed or has been deleted.
1411 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1412 let was_dirty = self.is_dirty();
1413 let mut file_changed = false;
1414
1415 if let Some(old_file) = self.file.as_ref() {
1416 if new_file.path() != old_file.path() {
1417 file_changed = true;
1418 }
1419
1420 let old_state = old_file.disk_state();
1421 let new_state = new_file.disk_state();
1422 if old_state != new_state {
1423 file_changed = true;
1424 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1425 cx.emit(BufferEvent::ReloadNeeded)
1426 }
1427 }
1428 } else {
1429 file_changed = true;
1430 };
1431
1432 self.file = Some(new_file);
1433 if file_changed {
1434 self.was_changed();
1435 self.non_text_state_update_count += 1;
1436 if was_dirty != self.is_dirty() {
1437 cx.emit(BufferEvent::DirtyChanged);
1438 }
1439 cx.emit(BufferEvent::FileHandleChanged);
1440 cx.notify();
1441 }
1442 }
1443
1444 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1445 Some(self.branch_state.as_ref()?.base_buffer.clone())
1446 }
1447
1448 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1449 pub fn language(&self) -> Option<&Arc<Language>> {
1450 self.language.as_ref()
1451 }
1452
1453 /// Returns the [`Language`] at the given location.
1454 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1455 let offset = position.to_offset(self);
1456 let mut is_first = true;
1457 let start_anchor = self.anchor_before(offset);
1458 let end_anchor = self.anchor_after(offset);
1459 self.syntax_map
1460 .lock()
1461 .layers_for_range(offset..offset, &self.text, false)
1462 .filter(|layer| {
1463 if is_first {
1464 is_first = false;
1465 return true;
1466 }
1467
1468 layer
1469 .included_sub_ranges
1470 .map(|sub_ranges| {
1471 sub_ranges.iter().any(|sub_range| {
1472 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1473 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1474 !is_before_start && !is_after_end
1475 })
1476 })
1477 .unwrap_or(true)
1478 })
1479 .last()
1480 .map(|info| info.language.clone())
1481 .or_else(|| self.language.clone())
1482 }
1483
1484 /// Returns each [`Language`] for the active syntax layers at the given location.
1485 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1486 let offset = position.to_offset(self);
1487 let mut languages: Vec<Arc<Language>> = self
1488 .syntax_map
1489 .lock()
1490 .layers_for_range(offset..offset, &self.text, false)
1491 .map(|info| info.language.clone())
1492 .collect();
1493
1494 if languages.is_empty()
1495 && let Some(buffer_language) = self.language()
1496 {
1497 languages.push(buffer_language.clone());
1498 }
1499
1500 languages
1501 }
1502
1503 /// An integer version number that accounts for all updates besides
1504 /// the buffer's text itself (which is versioned via a version vector).
1505 pub fn non_text_state_update_count(&self) -> usize {
1506 self.non_text_state_update_count
1507 }
1508
1509 /// Whether the buffer is being parsed in the background.
1510 #[cfg(any(test, feature = "test-support"))]
1511 pub fn is_parsing(&self) -> bool {
1512 self.reparse.is_some()
1513 }
1514
1515 /// Indicates whether the buffer contains any regions that may be
1516 /// written in a language that hasn't been loaded yet.
1517 pub fn contains_unknown_injections(&self) -> bool {
1518 self.syntax_map.lock().contains_unknown_injections()
1519 }
1520
1521 #[cfg(any(test, feature = "test-support"))]
1522 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1523 self.sync_parse_timeout = timeout;
1524 }
1525
1526 /// Called after an edit to synchronize the buffer's main parse tree with
1527 /// the buffer's new underlying state.
1528 ///
1529 /// Locks the syntax map and interpolates the edits since the last reparse
1530 /// into the foreground syntax tree.
1531 ///
1532 /// Then takes a stable snapshot of the syntax map before unlocking it.
1533 /// The snapshot with the interpolated edits is sent to a background thread,
1534 /// where we ask Tree-sitter to perform an incremental parse.
1535 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously.
1539 ///
    /// If we time out waiting on the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for the
    /// background parse to complete. When it does, it calls back into the main
    /// thread and assigns the result to the foreground parse state.
1544 ///
    /// If the buffer or grammar changed since the start of the background
    /// parse, we initiate an additional reparse recursively. To avoid
    /// concurrent parses for the same buffer, we only start a new parse if we
    /// are not already parsing in the background.
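    ///
    /// Conceptually, a simplified sketch of the control flow implemented below
    /// (`apply_now` and `apply_in_background` are hypothetical stand-ins for
    /// `did_finish_parsing` and the spawned follow-up task):
    ///
    /// ```ignore
    /// match cx
    ///     .background_executor()
    ///     .block_with_timeout(self.sync_parse_timeout, parse_task)
    /// {
    ///     Ok(new_syntax_snapshot) => apply_now(new_syntax_snapshot),
    ///     Err(parse_task) => apply_in_background(parse_task),
    /// }
    /// ```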
1549 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1550 if self.reparse.is_some() {
1551 return;
1552 }
1553 let language = if let Some(language) = self.language.clone() {
1554 language
1555 } else {
1556 return;
1557 };
1558
1559 let text = self.text_snapshot();
1560 let parsed_version = self.version();
1561
1562 let mut syntax_map = self.syntax_map.lock();
1563 syntax_map.interpolate(&text);
1564 let language_registry = syntax_map.language_registry();
1565 let mut syntax_snapshot = syntax_map.snapshot();
1566 drop(syntax_map);
1567
1568 let parse_task = cx.background_spawn({
1569 let language = language.clone();
1570 let language_registry = language_registry.clone();
1571 async move {
1572 syntax_snapshot.reparse(&text, language_registry, language);
1573 syntax_snapshot
1574 }
1575 });
1576
1577 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1578 match cx
1579 .background_executor()
1580 .block_with_timeout(self.sync_parse_timeout, parse_task)
1581 {
1582 Ok(new_syntax_snapshot) => {
1583 self.did_finish_parsing(new_syntax_snapshot, cx);
1584 self.reparse = None;
1585 }
1586 Err(parse_task) => {
1587 // todo(lw): hot foreground spawn
1588 self.reparse = Some(cx.spawn(async move |this, cx| {
1589 let new_syntax_map = cx.background_spawn(parse_task).await;
1590 this.update(cx, move |this, cx| {
1591 let grammar_changed = || {
1592 this.language.as_ref().is_none_or(|current_language| {
1593 !Arc::ptr_eq(&language, current_language)
1594 })
1595 };
1596 let language_registry_changed = || {
1597 new_syntax_map.contains_unknown_injections()
1598 && language_registry.is_some_and(|registry| {
1599 registry.version() != new_syntax_map.language_registry_version()
1600 })
1601 };
1602 let parse_again = this.version.changed_since(&parsed_version)
1603 || language_registry_changed()
1604 || grammar_changed();
1605 this.did_finish_parsing(new_syntax_map, cx);
1606 this.reparse = None;
1607 if parse_again {
1608 this.reparse(cx);
1609 }
1610 })
1611 .ok();
1612 }));
1613 }
1614 }
1615 }
1616
1617 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1618 self.was_changed();
1619 self.non_text_state_update_count += 1;
1620 self.syntax_map.lock().did_parse(syntax_snapshot);
1621 self.request_autoindent(cx);
1622 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1623 cx.emit(BufferEvent::Reparsed);
1624 cx.notify();
1625 }
1626
1627 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1628 self.parse_status.1.clone()
1629 }
1630
1631 /// Assign to the buffer a set of diagnostics created by a given language server.
1632 pub fn update_diagnostics(
1633 &mut self,
1634 server_id: LanguageServerId,
1635 diagnostics: DiagnosticSet,
1636 cx: &mut Context<Self>,
1637 ) {
1638 let lamport_timestamp = self.text.lamport_clock.tick();
1639 let op = Operation::UpdateDiagnostics {
1640 server_id,
1641 diagnostics: diagnostics.iter().cloned().collect(),
1642 lamport_timestamp,
1643 };
1644
1645 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1646 self.send_operation(op, true, cx);
1647 }
1648
1649 pub fn buffer_diagnostics(
1650 &self,
1651 for_server: Option<LanguageServerId>,
1652 ) -> Vec<&DiagnosticEntry<Anchor>> {
1653 match for_server {
1654 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1655 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1656 Err(_) => Vec::new(),
1657 },
1658 None => self
1659 .diagnostics
1660 .iter()
1661 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1662 .collect(),
1663 }
1664 }
1665
1666 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1667 if let Some(indent_sizes) = self.compute_autoindents() {
1668 let indent_sizes = cx.background_spawn(indent_sizes);
1669 match cx
1670 .background_executor()
1671 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1672 {
1673 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1674 Err(indent_sizes) => {
1675 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1676 let indent_sizes = indent_sizes.await;
1677 this.update(cx, |this, cx| {
1678 this.apply_autoindents(indent_sizes, cx);
1679 })
1680 .ok();
1681 }));
1682 }
1683 }
1684 } else {
1685 self.autoindent_requests.clear();
1686 for tx in self.wait_for_autoindent_txs.drain(..) {
1687 tx.send(()).ok();
1688 }
1689 }
1690 }
1691
1692 fn compute_autoindents(
1693 &self,
1694 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1695 let max_rows_between_yields = 100;
1696 let snapshot = self.snapshot();
1697 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1698 return None;
1699 }
1700
1701 let autoindent_requests = self.autoindent_requests.clone();
1702 Some(async move {
1703 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1704 for request in autoindent_requests {
1705 // Resolve each edited range to its row in the current buffer and in the
1706 // buffer before this batch of edits.
1707 let mut row_ranges = Vec::new();
1708 let mut old_to_new_rows = BTreeMap::new();
1709 let mut language_indent_sizes_by_new_row = Vec::new();
1710 for entry in &request.entries {
1711 let position = entry.range.start;
1712 let new_row = position.to_point(&snapshot).row;
1713 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1714 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1715
1716 if !entry.first_line_is_new {
1717 let old_row = position.to_point(&request.before_edit).row;
1718 old_to_new_rows.insert(old_row, new_row);
1719 }
1720 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1721 }
1722
1723 // Build a map containing the suggested indentation for each of the edited lines
1724 // with respect to the state of the buffer before these edits. This map is keyed
1725 // by the rows for these lines in the current state of the buffer.
1726 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1727 let old_edited_ranges =
1728 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1729 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1730 let mut language_indent_size = IndentSize::default();
1731 for old_edited_range in old_edited_ranges {
1732 let suggestions = request
1733 .before_edit
1734 .suggest_autoindents(old_edited_range.clone())
1735 .into_iter()
1736 .flatten();
1737 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1738 if let Some(suggestion) = suggestion {
1739 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1740
1741 // Find the indent size based on the language for this row.
1742 while let Some((row, size)) = language_indent_sizes.peek() {
1743 if *row > new_row {
1744 break;
1745 }
1746 language_indent_size = *size;
1747 language_indent_sizes.next();
1748 }
1749
1750 let suggested_indent = old_to_new_rows
1751 .get(&suggestion.basis_row)
1752 .and_then(|from_row| {
1753 Some(old_suggestions.get(from_row).copied()?.0)
1754 })
1755 .unwrap_or_else(|| {
1756 request
1757 .before_edit
1758 .indent_size_for_line(suggestion.basis_row)
1759 })
1760 .with_delta(suggestion.delta, language_indent_size);
1761 old_suggestions
1762 .insert(new_row, (suggested_indent, suggestion.within_error));
1763 }
1764 }
1765 yield_now().await;
1766 }
1767
1768 // Compute new suggestions for each line, but only include them in the result
1769 // if they differ from the old suggestion for that line.
1770 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1771 let mut language_indent_size = IndentSize::default();
1772 for (row_range, original_indent_column) in row_ranges {
1773 let new_edited_row_range = if request.is_block_mode {
1774 row_range.start..row_range.start + 1
1775 } else {
1776 row_range.clone()
1777 };
1778
1779 let suggestions = snapshot
1780 .suggest_autoindents(new_edited_row_range.clone())
1781 .into_iter()
1782 .flatten();
1783 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1784 if let Some(suggestion) = suggestion {
1785 // Find the indent size based on the language for this row.
1786 while let Some((row, size)) = language_indent_sizes.peek() {
1787 if *row > new_row {
1788 break;
1789 }
1790 language_indent_size = *size;
1791 language_indent_sizes.next();
1792 }
1793
1794 let suggested_indent = indent_sizes
1795 .get(&suggestion.basis_row)
1796 .copied()
1797 .map(|e| e.0)
1798 .unwrap_or_else(|| {
1799 snapshot.indent_size_for_line(suggestion.basis_row)
1800 })
1801 .with_delta(suggestion.delta, language_indent_size);
1802
1803 if old_suggestions.get(&new_row).is_none_or(
1804 |(old_indentation, was_within_error)| {
1805 suggested_indent != *old_indentation
1806 && (!suggestion.within_error || *was_within_error)
1807 },
1808 ) {
1809 indent_sizes.insert(
1810 new_row,
1811 (suggested_indent, request.ignore_empty_lines),
1812 );
1813 }
1814 }
1815 }
1816
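                    // In block mode, shift the remaining rows of the insertion by the same
                    // delta that was applied to its first row, preserving the block's
                    // relative indentation.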
1817 if let (true, Some(original_indent_column)) =
1818 (request.is_block_mode, original_indent_column)
1819 {
1820 let new_indent =
1821 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1822 *indent
1823 } else {
1824 snapshot.indent_size_for_line(row_range.start)
1825 };
1826 let delta = new_indent.len as i64 - original_indent_column as i64;
1827 if delta != 0 {
1828 for row in row_range.skip(1) {
1829 indent_sizes.entry(row).or_insert_with(|| {
1830 let mut size = snapshot.indent_size_for_line(row);
1831 if size.kind == new_indent.kind {
1832 match delta.cmp(&0) {
1833 Ordering::Greater => size.len += delta as u32,
1834 Ordering::Less => {
1835 size.len = size.len.saturating_sub(-delta as u32)
1836 }
1837 Ordering::Equal => {}
1838 }
1839 }
1840 (size, request.ignore_empty_lines)
1841 });
1842 }
1843 }
1844 }
1845
1846 yield_now().await;
1847 }
1848 }
1849
1850 indent_sizes
1851 .into_iter()
1852 .filter_map(|(row, (indent, ignore_empty_lines))| {
1853 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1854 None
1855 } else {
1856 Some((row, indent))
1857 }
1858 })
1859 .collect()
1860 })
1861 }
1862
1863 fn apply_autoindents(
1864 &mut self,
1865 indent_sizes: BTreeMap<u32, IndentSize>,
1866 cx: &mut Context<Self>,
1867 ) {
1868 self.autoindent_requests.clear();
1869 for tx in self.wait_for_autoindent_txs.drain(..) {
1870 tx.send(()).ok();
1871 }
1872
1873 let edits: Vec<_> = indent_sizes
1874 .into_iter()
1875 .filter_map(|(row, indent_size)| {
1876 let current_size = indent_size_for_line(self, row);
1877 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1878 })
1879 .collect();
1880
1881 let preserve_preview = self.preserve_preview();
1882 self.edit(edits, None, cx);
1883 if preserve_preview {
1884 self.refresh_preview();
1885 }
1886 }
1887
1888 /// Create a minimal edit that will cause the given row to be indented
1889 /// with the given size. After applying this edit, the length of the line
1890 /// will always be at least `new_size.len`.
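    ///
    /// An illustrative sketch, not compiled here (hence `ignore`):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of that row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```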
1891 pub fn edit_for_indent_size_adjustment(
1892 row: u32,
1893 current_size: IndentSize,
1894 new_size: IndentSize,
1895 ) -> Option<(Range<Point>, String)> {
1896 if new_size.kind == current_size.kind {
1897            match new_size.len.cmp(&current_size.len) {
1898 Ordering::Greater => {
1899 let point = Point::new(row, 0);
1900 Some((
1901 point..point,
1902 iter::repeat(new_size.char())
1903 .take((new_size.len - current_size.len) as usize)
1904 .collect::<String>(),
1905 ))
1906 }
1907
1908 Ordering::Less => Some((
1909 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1910 String::new(),
1911 )),
1912
1913 Ordering::Equal => None,
1914 }
1915 } else {
1916 Some((
1917 Point::new(row, 0)..Point::new(row, current_size.len),
1918 iter::repeat(new_size.char())
1919 .take(new_size.len as usize)
1920 .collect::<String>(),
1921 ))
1922 }
1923 }
1924
1925 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1926 /// and the given new text.
1927 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1928 let old_text = self.as_rope().clone();
1929 let base_version = self.version();
1930 cx.background_executor()
1931 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1932 let old_text = old_text.to_string();
1933 let line_ending = LineEnding::detect(&new_text);
1934 LineEnding::normalize(&mut new_text);
1935 let edits = text_diff(&old_text, &new_text);
1936 Diff {
1937 base_version,
1938 line_ending,
1939 edits,
1940 }
1941 })
1942 }
1943
1944 /// Spawns a background task that searches the buffer for any whitespace
1945    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
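    ///
    /// A minimal sketch of consuming the task (assumes the caller awaits it and then
    /// re-enters the buffer's update context before applying):
    ///
    /// ```ignore
    /// let diff = buffer.remove_trailing_whitespace(cx).await;
    /// buffer.apply_diff(diff, cx);
    /// ```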
1946 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1947 let old_text = self.as_rope().clone();
1948 let line_ending = self.line_ending();
1949 let base_version = self.version();
1950 cx.background_spawn(async move {
1951 let ranges = trailing_whitespace_ranges(&old_text);
1952 let empty = Arc::<str>::from("");
1953 Diff {
1954 base_version,
1955 line_ending,
1956 edits: ranges
1957 .into_iter()
1958 .map(|range| (range, empty.clone()))
1959 .collect(),
1960 }
1961 })
1962 }
1963
1964 /// Ensures that the buffer ends with a single newline character, and
1965 /// no other whitespace. Skips if the buffer is empty.
1966 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1967 let len = self.len();
1968 if len == 0 {
1969 return;
1970 }
1971 let mut offset = len;
1972 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1973 let non_whitespace_len = chunk
1974 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1975 .len();
1976 offset -= chunk.len();
1977 offset += non_whitespace_len;
1978 if non_whitespace_len != 0 {
1979 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1980 return;
1981 }
1982 break;
1983 }
1984 }
1985 self.edit([(offset..len, "\n")], None, cx);
1986 }
1987
1988 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1989 /// calculated, then adjust the diff to account for those changes, and discard any
1990 /// parts of the diff that conflict with those changes.
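    ///
    /// A minimal sketch (assumes `diff` was produced by [`Buffer::diff`] or
    /// [`Buffer::remove_trailing_whitespace`] and has already been awaited):
    ///
    /// ```ignore
    /// if buffer.apply_diff(diff, cx).is_some() {
    ///     // Keep the applied diff as its own undo step.
    ///     buffer.finalize_last_transaction();
    /// }
    /// ```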
1991 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1992 let snapshot = self.snapshot();
1993 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1994 let mut delta = 0;
1995 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1996 while let Some(edit_since) = edits_since.peek() {
1997 // If the edit occurs after a diff hunk, then it does not
1998 // affect that hunk.
1999 if edit_since.old.start > range.end {
2000 break;
2001 }
2002 // If the edit precedes the diff hunk, then adjust the hunk
2003 // to reflect the edit.
2004 else if edit_since.old.end < range.start {
2005 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2006 edits_since.next();
2007 }
2008 // If the edit intersects a diff hunk, then discard that hunk.
2009 else {
2010 return None;
2011 }
2012 }
2013
2014 let start = (range.start as i64 + delta) as usize;
2015 let end = (range.end as i64 + delta) as usize;
2016 Some((start..end, new_text))
2017 });
2018
2019 self.start_transaction();
2020 self.text.set_line_ending(diff.line_ending);
2021 self.edit(adjusted_edits, None, cx);
2022 self.end_transaction(cx)
2023 }
2024
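    /// Returns whether the buffer has any edits since it was last saved or reloaded,
    /// caching the result for the current buffer version.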
2025 pub fn has_unsaved_edits(&self) -> bool {
2026 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2027
2028 if last_version == self.version {
2029 self.has_unsaved_edits
2030 .set((last_version, has_unsaved_edits));
2031 return has_unsaved_edits;
2032 }
2033
2034 let has_edits = self.has_edits_since(&self.saved_version);
2035 self.has_unsaved_edits
2036 .set((self.version.clone(), has_edits));
2037 has_edits
2038 }
2039
2040 /// Checks if the buffer has unsaved changes.
2041 pub fn is_dirty(&self) -> bool {
2042 if self.capability == Capability::ReadOnly {
2043 return false;
2044 }
2045 if self.has_conflict {
2046 return true;
2047 }
2048 match self.file.as_ref().map(|f| f.disk_state()) {
2049 Some(DiskState::New) | Some(DiskState::Deleted) => {
2050 !self.is_empty() && self.has_unsaved_edits()
2051 }
2052 _ => self.has_unsaved_edits(),
2053 }
2054 }
2055
2056 /// Checks if the buffer and its file have both changed since the buffer
2057 /// was last saved or reloaded.
2058 pub fn has_conflict(&self) -> bool {
2059 if self.has_conflict {
2060 return true;
2061 }
2062 let Some(file) = self.file.as_ref() else {
2063 return false;
2064 };
2065 match file.disk_state() {
2066 DiskState::New => false,
2067 DiskState::Present { mtime } => match self.saved_mtime {
2068 Some(saved_mtime) => {
2069 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2070 }
2071 None => true,
2072 },
2073 DiskState::Deleted => false,
2074 }
2075 }
2076
2077 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2078 pub fn subscribe(&mut self) -> Subscription {
2079 self.text.subscribe()
2080 }
2081
2082 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2083 ///
2084 /// This allows downstream code to check if the buffer's text has changed without
2085    /// waiting for an effect cycle, which would be required if using events.
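    ///
    /// A minimal sketch of the caller's side (the `Rc<Cell<bool>>` setup is the
    /// caller's responsibility, not part of this API):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...later, after edits may have been applied...
    /// if changed.replace(false) {
    ///     // react to the change without waiting for a `BufferEvent`
    /// }
    /// ```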
2086 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2087 if let Err(ix) = self
2088 .change_bits
2089 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2090 {
2091 self.change_bits.insert(ix, bit);
2092 }
2093 }
2094
2095 /// Set the change bit for all "listeners".
2096 fn was_changed(&mut self) {
2097 self.change_bits.retain(|change_bit| {
2098 change_bit
2099 .upgrade()
2100 .inspect(|bit| {
2101 _ = bit.replace(true);
2102 })
2103 .is_some()
2104 });
2105 }
2106
2107 /// Starts a transaction, if one is not already in-progress. When undoing or
2108 /// redoing edits, all of the edits performed within a transaction are undone
2109 /// or redone together.
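    ///
    /// A minimal sketch of grouping two edits into a single undo step:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits together
    /// ```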
2110 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2111 self.start_transaction_at(Instant::now())
2112 }
2113
2114 /// Starts a transaction, providing the current time. Subsequent transactions
2115 /// that occur within a short period of time will be grouped together. This
2116 /// is controlled by the buffer's undo grouping duration.
2117 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2118 self.transaction_depth += 1;
2119 if self.was_dirty_before_starting_transaction.is_none() {
2120 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2121 }
2122 self.text.start_transaction_at(now)
2123 }
2124
2125 /// Terminates the current transaction, if this is the outermost transaction.
2126 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2127 self.end_transaction_at(Instant::now(), cx)
2128 }
2129
2130 /// Terminates the current transaction, providing the current time. Subsequent transactions
2131 /// that occur within a short period of time will be grouped together. This
2132 /// is controlled by the buffer's undo grouping duration.
2133 pub fn end_transaction_at(
2134 &mut self,
2135 now: Instant,
2136 cx: &mut Context<Self>,
2137 ) -> Option<TransactionId> {
2138 assert!(self.transaction_depth > 0);
2139 self.transaction_depth -= 1;
2140 let was_dirty = if self.transaction_depth == 0 {
2141 self.was_dirty_before_starting_transaction.take().unwrap()
2142 } else {
2143 false
2144 };
2145 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2146 self.did_edit(&start_version, was_dirty, cx);
2147 Some(transaction_id)
2148 } else {
2149 None
2150 }
2151 }
2152
2153 /// Manually add a transaction to the buffer's undo history.
2154 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2155 self.text.push_transaction(transaction, now);
2156 }
2157
2158 /// Differs from `push_transaction` in that it does not clear the redo
2159 /// stack. Intended to be used to create a parent transaction to merge
2160 /// potential child transactions into.
2161 ///
2162 /// The caller is responsible for removing it from the undo history using
2163 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2164 /// are merged into this transaction, the caller is responsible for ensuring
2165 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2166 /// cleared is to create transactions with the usual `start_transaction` and
2167    /// `end_transaction` methods and merge the resulting transactions into
2168    /// the transaction created by this method.
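    ///
    /// A minimal sketch of that pattern:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```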
2169 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2170 self.text.push_empty_transaction(now)
2171 }
2172
2173 /// Prevent the last transaction from being grouped with any subsequent transactions,
2174    /// even if they occur within the buffer's undo grouping duration.
2175 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2176 self.text.finalize_last_transaction()
2177 }
2178
2179 /// Manually group all changes since a given transaction.
2180 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2181 self.text.group_until_transaction(transaction_id);
2182 }
2183
2184 /// Manually remove a transaction from the buffer's undo history
2185 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2186 self.text.forget_transaction(transaction_id)
2187 }
2188
2189 /// Retrieve a transaction from the buffer's undo history
2190 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2191 self.text.get_transaction(transaction_id)
2192 }
2193
2194 /// Manually merge two transactions in the buffer's undo history.
2195 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2196 self.text.merge_transactions(transaction, destination);
2197 }
2198
2199 /// Waits for the buffer to receive operations with the given timestamps.
2200 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2201 &mut self,
2202 edit_ids: It,
2203 ) -> impl Future<Output = Result<()>> + use<It> {
2204 self.text.wait_for_edits(edit_ids)
2205 }
2206
2207 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2208 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2209 &mut self,
2210 anchors: It,
2211 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2212 self.text.wait_for_anchors(anchors)
2213 }
2214
2215 /// Waits for the buffer to receive operations up to the given version.
2216 pub fn wait_for_version(
2217 &mut self,
2218 version: clock::Global,
2219 ) -> impl Future<Output = Result<()>> + use<> {
2220 self.text.wait_for_version(version)
2221 }
2222
2223 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2224    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2225 pub fn give_up_waiting(&mut self) {
2226 self.text.give_up_waiting();
2227 }
2228
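    /// Returns a receiver that resolves once any pending autoindent requests have been
    /// applied, or `None` if no autoindent is pending.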
2229 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2230 let mut rx = None;
2231 if !self.autoindent_requests.is_empty() {
2232 let channel = oneshot::channel();
2233 self.wait_for_autoindent_txs.push(channel.0);
2234 rx = Some(channel.1);
2235 }
2236 rx
2237 }
2238
2239 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2240 pub fn set_active_selections(
2241 &mut self,
2242 selections: Arc<[Selection<Anchor>]>,
2243 line_mode: bool,
2244 cursor_shape: CursorShape,
2245 cx: &mut Context<Self>,
2246 ) {
2247 let lamport_timestamp = self.text.lamport_clock.tick();
2248 self.remote_selections.insert(
2249 self.text.replica_id(),
2250 SelectionSet {
2251 selections: selections.clone(),
2252 lamport_timestamp,
2253 line_mode,
2254 cursor_shape,
2255 },
2256 );
2257 self.send_operation(
2258 Operation::UpdateSelections {
2259 selections,
2260 line_mode,
2261 lamport_timestamp,
2262 cursor_shape,
2263 },
2264 true,
2265 cx,
2266 );
2267 self.non_text_state_update_count += 1;
2268 cx.notify();
2269 }
2270
2271 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2272 /// this replica.
2273 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2274 if self
2275 .remote_selections
2276 .get(&self.text.replica_id())
2277 .is_none_or(|set| !set.selections.is_empty())
2278 {
2279 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2280 }
2281 }
2282
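    /// Stores a set of selections attributed to the agent. Unlike
    /// [`Buffer::set_active_selections`], these are not broadcast to other replicas.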
2283 pub fn set_agent_selections(
2284 &mut self,
2285 selections: Arc<[Selection<Anchor>]>,
2286 line_mode: bool,
2287 cursor_shape: CursorShape,
2288 cx: &mut Context<Self>,
2289 ) {
2290 let lamport_timestamp = self.text.lamport_clock.tick();
2291 self.remote_selections.insert(
2292 ReplicaId::AGENT,
2293 SelectionSet {
2294 selections,
2295 lamport_timestamp,
2296 line_mode,
2297 cursor_shape,
2298 },
2299 );
2300 self.non_text_state_update_count += 1;
2301 cx.notify();
2302 }
2303
2304 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2305 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2306 }
2307
2308 /// Replaces the buffer's entire text.
2309 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2310 where
2311 T: Into<Arc<str>>,
2312 {
2313 self.autoindent_requests.clear();
2314 self.edit([(0..self.len(), text)], None, cx)
2315 }
2316
2317 /// Appends the given text to the end of the buffer.
2318 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2319 where
2320 T: Into<Arc<str>>,
2321 {
2322 self.edit([(self.len()..self.len(), text)], None, cx)
2323 }
2324
2325 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2326 /// delete, and a string of text to insert at that location.
2327 ///
2328 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2329 /// request for the edited ranges, which will be processed when the buffer finishes
2330 /// parsing.
2331 ///
2332 /// Parsing takes place at the end of a transaction, and may compute synchronously
2333 /// or asynchronously, depending on the changes.
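    ///
    /// A minimal sketch of the call shape (byte offsets shown here, but any
    /// `ToOffset` type such as `Point` works):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "foo"), (10..10, "\nbar")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```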
2334 pub fn edit<I, S, T>(
2335 &mut self,
2336 edits_iter: I,
2337 autoindent_mode: Option<AutoindentMode>,
2338 cx: &mut Context<Self>,
2339 ) -> Option<clock::Lamport>
2340 where
2341 I: IntoIterator<Item = (Range<S>, T)>,
2342 S: ToOffset,
2343 T: Into<Arc<str>>,
2344 {
2345 // Skip invalid edits and coalesce contiguous ones.
2346 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2347
2348 for (range, new_text) in edits_iter {
2349 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2350
2351 if range.start > range.end {
2352 mem::swap(&mut range.start, &mut range.end);
2353 }
2354 let new_text = new_text.into();
2355 if !new_text.is_empty() || !range.is_empty() {
2356 if let Some((prev_range, prev_text)) = edits.last_mut()
2357 && prev_range.end >= range.start
2358 {
2359 prev_range.end = cmp::max(prev_range.end, range.end);
2360 *prev_text = format!("{prev_text}{new_text}").into();
2361 } else {
2362 edits.push((range, new_text));
2363 }
2364 }
2365 }
2366 if edits.is_empty() {
2367 return None;
2368 }
2369
2370 self.start_transaction();
2371 self.pending_autoindent.take();
2372 let autoindent_request = autoindent_mode
2373 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2374
2375 let edit_operation = self
2376 .text
2377 .edit(edits.iter().cloned(), cx.background_executor());
2378 let edit_id = edit_operation.timestamp();
2379
2380 if let Some((before_edit, mode)) = autoindent_request {
2381 let mut delta = 0isize;
2382 let mut previous_setting = None;
2383 let entries: Vec<_> = edits
2384 .into_iter()
2385 .enumerate()
2386 .zip(&edit_operation.as_edit().unwrap().new_text)
2387 .filter(|((_, (range, _)), _)| {
2388 let language = before_edit.language_at(range.start);
2389 let language_id = language.map(|l| l.id());
2390 if let Some((cached_language_id, auto_indent)) = previous_setting
2391 && cached_language_id == language_id
2392 {
2393 auto_indent
2394 } else {
2395 // The auto-indent setting is not present in editorconfigs, hence
2396 // we can avoid passing the file here.
2397 let auto_indent =
2398 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2399 previous_setting = Some((language_id, auto_indent));
2400 auto_indent
2401 }
2402 })
2403 .map(|((ix, (range, _)), new_text)| {
2404 let new_text_length = new_text.len();
2405 let old_start = range.start.to_point(&before_edit);
2406 let new_start = (delta + range.start as isize) as usize;
2407 let range_len = range.end - range.start;
2408 delta += new_text_length as isize - range_len as isize;
2409
2410 // Decide what range of the insertion to auto-indent, and whether
2411 // the first line of the insertion should be considered a newly-inserted line
2412 // or an edit to an existing line.
2413 let mut range_of_insertion_to_indent = 0..new_text_length;
2414 let mut first_line_is_new = true;
2415
2416 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2417 let old_line_end = before_edit.line_len(old_start.row);
2418
2419 if old_start.column > old_line_start {
2420 first_line_is_new = false;
2421 }
2422
2423 if !new_text.contains('\n')
2424 && (old_start.column + (range_len as u32) < old_line_end
2425 || old_line_end == old_line_start)
2426 {
2427 first_line_is_new = false;
2428 }
2429
2430 // When inserting text starting with a newline, avoid auto-indenting the
2431 // previous line.
2432 if new_text.starts_with('\n') {
2433 range_of_insertion_to_indent.start += 1;
2434 first_line_is_new = true;
2435 }
2436
2437 let mut original_indent_column = None;
2438 if let AutoindentMode::Block {
2439 original_indent_columns,
2440 } = &mode
2441 {
2442 original_indent_column = Some(if new_text.starts_with('\n') {
2443 indent_size_for_text(
2444 new_text[range_of_insertion_to_indent.clone()].chars(),
2445 )
2446 .len
2447 } else {
2448 original_indent_columns
2449 .get(ix)
2450 .copied()
2451 .flatten()
2452 .unwrap_or_else(|| {
2453 indent_size_for_text(
2454 new_text[range_of_insertion_to_indent.clone()].chars(),
2455 )
2456 .len
2457 })
2458 });
2459
2460 // Avoid auto-indenting the line after the edit.
2461 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2462 range_of_insertion_to_indent.end -= 1;
2463 }
2464 }
2465
2466 AutoindentRequestEntry {
2467 first_line_is_new,
2468 original_indent_column,
2469 indent_size: before_edit.language_indent_size_at(range.start, cx),
2470 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2471 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2472 }
2473 })
2474 .collect();
2475
2476 if !entries.is_empty() {
2477 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2478 before_edit,
2479 entries,
2480 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2481 ignore_empty_lines: false,
2482 }));
2483 }
2484 }
2485
2486 self.end_transaction(cx);
2487 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2488 Some(edit_id)
2489 }
2490
2491 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2492 self.was_changed();
2493
2494 if self.edits_since::<usize>(old_version).next().is_none() {
2495 return;
2496 }
2497
2498 self.reparse(cx);
2499 cx.emit(BufferEvent::Edited);
2500 if was_dirty != self.is_dirty() {
2501 cx.emit(BufferEvent::DirtyChanged);
2502 }
2503 cx.notify();
2504 }
2505
2506 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2507 where
2508 I: IntoIterator<Item = Range<T>>,
2509 T: ToOffset + Copy,
2510 {
2511 let before_edit = self.snapshot();
2512 let entries = ranges
2513 .into_iter()
2514 .map(|range| AutoindentRequestEntry {
2515 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2516 first_line_is_new: true,
2517 indent_size: before_edit.language_indent_size_at(range.start, cx),
2518 original_indent_column: None,
2519 })
2520 .collect();
2521 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2522 before_edit,
2523 entries,
2524 is_block_mode: false,
2525 ignore_empty_lines: true,
2526 }));
2527 self.request_autoindent(cx);
2528 }
2529
2530    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2531    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2532 pub fn insert_empty_line(
2533 &mut self,
2534 position: impl ToPoint,
2535 space_above: bool,
2536 space_below: bool,
2537 cx: &mut Context<Self>,
2538 ) -> Point {
2539 let mut position = position.to_point(self);
2540
2541 self.start_transaction();
2542
2543 self.edit(
2544 [(position..position, "\n")],
2545 Some(AutoindentMode::EachLine),
2546 cx,
2547 );
2548
2549 if position.column > 0 {
2550 position += Point::new(1, 0);
2551 }
2552
2553 if !self.is_line_blank(position.row) {
2554 self.edit(
2555 [(position..position, "\n")],
2556 Some(AutoindentMode::EachLine),
2557 cx,
2558 );
2559 }
2560
2561 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2562 self.edit(
2563 [(position..position, "\n")],
2564 Some(AutoindentMode::EachLine),
2565 cx,
2566 );
2567 position.row += 1;
2568 }
2569
2570 if space_below
2571 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2572 {
2573 self.edit(
2574 [(position..position, "\n")],
2575 Some(AutoindentMode::EachLine),
2576 cx,
2577 );
2578 }
2579
2580 self.end_transaction(cx);
2581
2582 position
2583 }
2584
2585 /// Applies the given remote operations to the buffer.
2586 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2587 self.pending_autoindent.take();
2588 let was_dirty = self.is_dirty();
2589 let old_version = self.version.clone();
2590 let mut deferred_ops = Vec::new();
2591 let buffer_ops = ops
2592 .into_iter()
2593 .filter_map(|op| match op {
2594 Operation::Buffer(op) => Some(op),
2595 _ => {
2596 if self.can_apply_op(&op) {
2597 self.apply_op(op, cx);
2598 } else {
2599 deferred_ops.push(op);
2600 }
2601 None
2602 }
2603 })
2604 .collect::<Vec<_>>();
2605 for operation in buffer_ops.iter() {
2606 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2607 }
2608 self.text
2609 .apply_ops(buffer_ops, Some(cx.background_executor()));
2610 self.deferred_ops.insert(deferred_ops);
2611 self.flush_deferred_ops(cx);
2612 self.did_edit(&old_version, was_dirty, cx);
2613 // Notify independently of whether the buffer was edited as the operations could include a
2614 // selection update.
2615 cx.notify();
2616 }
2617
2618 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2619 let mut deferred_ops = Vec::new();
2620 for op in self.deferred_ops.drain().iter().cloned() {
2621 if self.can_apply_op(&op) {
2622 self.apply_op(op, cx);
2623 } else {
2624 deferred_ops.push(op);
2625 }
2626 }
2627 self.deferred_ops.insert(deferred_ops);
2628 }
2629
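    /// Returns whether any received operations are still deferred and waiting to be applied.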
2630 pub fn has_deferred_ops(&self) -> bool {
2631 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2632 }
2633
2634 fn can_apply_op(&self, operation: &Operation) -> bool {
2635 match operation {
2636 Operation::Buffer(_) => {
2637 unreachable!("buffer operations should never be applied at this layer")
2638 }
2639 Operation::UpdateDiagnostics {
2640 diagnostics: diagnostic_set,
2641 ..
2642 } => diagnostic_set.iter().all(|diagnostic| {
2643 self.text.can_resolve(&diagnostic.range.start)
2644 && self.text.can_resolve(&diagnostic.range.end)
2645 }),
2646 Operation::UpdateSelections { selections, .. } => selections
2647 .iter()
2648 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2649 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2650 }
2651 }
2652
2653 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2654 match operation {
2655 Operation::Buffer(_) => {
2656 unreachable!("buffer operations should never be applied at this layer")
2657 }
2658 Operation::UpdateDiagnostics {
2659 server_id,
2660 diagnostics: diagnostic_set,
2661 lamport_timestamp,
2662 } => {
2663 let snapshot = self.snapshot();
2664 self.apply_diagnostic_update(
2665 server_id,
2666 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2667 lamport_timestamp,
2668 cx,
2669 );
2670 }
2671 Operation::UpdateSelections {
2672 selections,
2673 lamport_timestamp,
2674 line_mode,
2675 cursor_shape,
2676 } => {
2677 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2678 && set.lamport_timestamp > lamport_timestamp
2679 {
2680 return;
2681 }
2682
2683 self.remote_selections.insert(
2684 lamport_timestamp.replica_id,
2685 SelectionSet {
2686 selections,
2687 lamport_timestamp,
2688 line_mode,
2689 cursor_shape,
2690 },
2691 );
2692 self.text.lamport_clock.observe(lamport_timestamp);
2693 self.non_text_state_update_count += 1;
2694 }
2695 Operation::UpdateCompletionTriggers {
2696 triggers,
2697 lamport_timestamp,
2698 server_id,
2699 } => {
2700 if triggers.is_empty() {
2701 self.completion_triggers_per_language_server
2702 .remove(&server_id);
2703 self.completion_triggers = self
2704 .completion_triggers_per_language_server
2705 .values()
2706 .flat_map(|triggers| triggers.iter().cloned())
2707 .collect();
2708 } else {
2709 self.completion_triggers_per_language_server
2710 .insert(server_id, triggers.iter().cloned().collect());
2711 self.completion_triggers.extend(triggers);
2712 }
2713 self.text.lamport_clock.observe(lamport_timestamp);
2714 }
2715 Operation::UpdateLineEnding {
2716 line_ending,
2717 lamport_timestamp,
2718 } => {
2719 self.text.set_line_ending(line_ending);
2720 self.text.lamport_clock.observe(lamport_timestamp);
2721 }
2722 }
2723 }
2724
2725 fn apply_diagnostic_update(
2726 &mut self,
2727 server_id: LanguageServerId,
2728 diagnostics: DiagnosticSet,
2729 lamport_timestamp: clock::Lamport,
2730 cx: &mut Context<Self>,
2731 ) {
2732 if lamport_timestamp > self.diagnostics_timestamp {
2733 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2734 if diagnostics.is_empty() {
2735 if let Ok(ix) = ix {
2736 self.diagnostics.remove(ix);
2737 }
2738 } else {
2739 match ix {
2740 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2741 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2742 };
2743 }
2744 self.diagnostics_timestamp = lamport_timestamp;
2745 self.non_text_state_update_count += 1;
2746 self.text.lamport_clock.observe(lamport_timestamp);
2747 cx.notify();
2748 cx.emit(BufferEvent::DiagnosticsUpdated);
2749 }
2750 }
2751
2752 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2753 self.was_changed();
2754 cx.emit(BufferEvent::Operation {
2755 operation,
2756 is_local,
2757 });
2758 }
2759
2760 /// Removes the selections for a given peer.
2761 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2762 self.remote_selections.remove(&replica_id);
2763 cx.notify();
2764 }
2765
2766 /// Undoes the most recent transaction.
2767 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2768 let was_dirty = self.is_dirty();
2769 let old_version = self.version.clone();
2770
2771 if let Some((transaction_id, operation)) = self.text.undo() {
2772 self.send_operation(Operation::Buffer(operation), true, cx);
2773 self.did_edit(&old_version, was_dirty, cx);
2774 Some(transaction_id)
2775 } else {
2776 None
2777 }
2778 }
2779
2780 /// Manually undoes a specific transaction in the buffer's undo history.
2781 pub fn undo_transaction(
2782 &mut self,
2783 transaction_id: TransactionId,
2784 cx: &mut Context<Self>,
2785 ) -> bool {
2786 let was_dirty = self.is_dirty();
2787 let old_version = self.version.clone();
2788 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2789 self.send_operation(Operation::Buffer(operation), true, cx);
2790 self.did_edit(&old_version, was_dirty, cx);
2791 true
2792 } else {
2793 false
2794 }
2795 }
2796
2797 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2798 pub fn undo_to_transaction(
2799 &mut self,
2800 transaction_id: TransactionId,
2801 cx: &mut Context<Self>,
2802 ) -> bool {
2803 let was_dirty = self.is_dirty();
2804 let old_version = self.version.clone();
2805
2806 let operations = self.text.undo_to_transaction(transaction_id);
2807 let undone = !operations.is_empty();
2808 for operation in operations {
2809 self.send_operation(Operation::Buffer(operation), true, cx);
2810 }
2811 if undone {
2812 self.did_edit(&old_version, was_dirty, cx)
2813 }
2814 undone
2815 }
2816
2817 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2818 let was_dirty = self.is_dirty();
2819 let operation = self.text.undo_operations(counts);
2820 let old_version = self.version.clone();
2821 self.send_operation(Operation::Buffer(operation), true, cx);
2822 self.did_edit(&old_version, was_dirty, cx);
2823 }
2824
2825    /// Redoes the most recent transaction.
2826 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2827 let was_dirty = self.is_dirty();
2828 let old_version = self.version.clone();
2829
2830 if let Some((transaction_id, operation)) = self.text.redo() {
2831 self.send_operation(Operation::Buffer(operation), true, cx);
2832 self.did_edit(&old_version, was_dirty, cx);
2833 Some(transaction_id)
2834 } else {
2835 None
2836 }
2837 }
2838
2839    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2840 pub fn redo_to_transaction(
2841 &mut self,
2842 transaction_id: TransactionId,
2843 cx: &mut Context<Self>,
2844 ) -> bool {
2845 let was_dirty = self.is_dirty();
2846 let old_version = self.version.clone();
2847
2848 let operations = self.text.redo_to_transaction(transaction_id);
2849 let redone = !operations.is_empty();
2850 for operation in operations {
2851 self.send_operation(Operation::Buffer(operation), true, cx);
2852 }
2853 if redone {
2854 self.did_edit(&old_version, was_dirty, cx)
2855 }
2856 redone
2857 }
2858
2859 /// Override current completion triggers with the user-provided completion triggers.
2860 pub fn set_completion_triggers(
2861 &mut self,
2862 server_id: LanguageServerId,
2863 triggers: BTreeSet<String>,
2864 cx: &mut Context<Self>,
2865 ) {
2866 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2867 if triggers.is_empty() {
2868 self.completion_triggers_per_language_server
2869 .remove(&server_id);
2870 self.completion_triggers = self
2871 .completion_triggers_per_language_server
2872 .values()
2873 .flat_map(|triggers| triggers.iter().cloned())
2874 .collect();
2875 } else {
2876 self.completion_triggers_per_language_server
2877 .insert(server_id, triggers.clone());
2878 self.completion_triggers.extend(triggers.iter().cloned());
2879 }
2880 self.send_operation(
2881 Operation::UpdateCompletionTriggers {
2882 triggers: triggers.into_iter().collect(),
2883 lamport_timestamp: self.completion_triggers_timestamp,
2884 server_id,
2885 },
2886 true,
2887 cx,
2888 );
2889 cx.notify();
2890 }
2891
2892 /// Returns a list of strings which trigger a completion menu for this language.
2893    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2894 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2895 &self.completion_triggers
2896 }
2897
2898 /// Call this directly after performing edits to prevent the preview tab
2899 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2900 /// to return false until there are additional edits.
2901 pub fn refresh_preview(&mut self) {
2902 self.preview_version = self.version.clone();
2903 }
2904
2905 /// Whether we should preserve the preview status of a tab containing this buffer.
2906 pub fn preserve_preview(&self) -> bool {
2907 !self.has_edits_since(&self.preview_version)
2908 }
2909}
2910
2911#[doc(hidden)]
2912#[cfg(any(test, feature = "test-support"))]
2913impl Buffer {
2914 pub fn edit_via_marked_text(
2915 &mut self,
2916 marked_string: &str,
2917 autoindent_mode: Option<AutoindentMode>,
2918 cx: &mut Context<Self>,
2919 ) {
2920 let edits = self.edits_for_marked_text(marked_string);
2921 self.edit(edits, autoindent_mode, cx);
2922 }
2923
2924 pub fn set_group_interval(&mut self, group_interval: Duration) {
2925 self.text.set_group_interval(group_interval);
2926 }
2927
2928 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2929 where
2930 T: rand::Rng,
2931 {
2932 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2933 let mut last_end = None;
2934 for _ in 0..old_range_count {
2935 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2936 break;
2937 }
2938
2939 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2940 let mut range = self.random_byte_range(new_start, rng);
2941 if rng.random_bool(0.2) {
2942 mem::swap(&mut range.start, &mut range.end);
2943 }
2944 last_end = Some(range.end);
2945
2946 let new_text_len = rng.random_range(0..10);
2947 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2948 new_text = new_text.to_uppercase();
2949
2950 edits.push((range, new_text));
2951 }
2952 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2953 self.edit(edits, None, cx);
2954 }
2955
2956 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2957 let was_dirty = self.is_dirty();
2958 let old_version = self.version.clone();
2959
2960 let ops = self.text.randomly_undo_redo(rng);
2961 if !ops.is_empty() {
2962 for op in ops {
2963 self.send_operation(Operation::Buffer(op), true, cx);
2964 self.did_edit(&old_version, was_dirty, cx);
2965 }
2966 }
2967 }
2968}
2969
2970impl EventEmitter<BufferEvent> for Buffer {}
2971
2972impl Deref for Buffer {
2973 type Target = TextBuffer;
2974
2975 fn deref(&self) -> &Self::Target {
2976 &self.text
2977 }
2978}
2979
2980impl BufferSnapshot {
2981 /// Returns [`IndentSize`] for a given line that respects user settings and
2982 /// language preferences.
2983 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2984 indent_size_for_line(self, row)
2985 }
2986
2987 /// Returns [`IndentSize`] for a given position that respects user settings
2988 /// and language preferences.
2989 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2990 let settings = language_settings(
2991 self.language_at(position).map(|l| l.name()),
2992 self.file(),
2993 cx,
2994 );
2995 if settings.hard_tabs {
2996 IndentSize::tab()
2997 } else {
2998 IndentSize::spaces(settings.tab_size.get())
2999 }
3000 }
3001
3002 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3003 /// is passed in as `single_indent_size`.
3004 pub fn suggested_indents(
3005 &self,
3006 rows: impl Iterator<Item = u32>,
3007 single_indent_size: IndentSize,
3008 ) -> BTreeMap<u32, IndentSize> {
3009 let mut result = BTreeMap::new();
3010
3011 for row_range in contiguous_ranges(rows, 10) {
3012 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3013 Some(suggestions) => suggestions,
3014 _ => break,
3015 };
3016
3017 for (row, suggestion) in row_range.zip(suggestions) {
3018 let indent_size = if let Some(suggestion) = suggestion {
3019 result
3020 .get(&suggestion.basis_row)
3021 .copied()
3022 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3023 .with_delta(suggestion.delta, single_indent_size)
3024 } else {
3025 self.indent_size_for_line(row)
3026 };
3027
3028 result.insert(row, indent_size);
3029 }
3030 }
3031
3032 result
3033 }
3034
3035 fn suggest_autoindents(
3036 &self,
3037 row_range: Range<u32>,
3038 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3039 let config = &self.language.as_ref()?.config;
3040 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3041
3042 #[derive(Debug, Clone)]
3043 struct StartPosition {
3044 start: Point,
3045 suffix: SharedString,
3046 }
3047
3048 // Find the suggested indentation ranges based on the syntax tree.
3049 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3050 let end = Point::new(row_range.end, 0);
3051 let range = (start..end).to_offset(&self.text);
3052 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3053 Some(&grammar.indents_config.as_ref()?.query)
3054 });
3055 let indent_configs = matches
3056 .grammars()
3057 .iter()
3058 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3059 .collect::<Vec<_>>();
3060
3061 let mut indent_ranges = Vec::<Range<Point>>::new();
3062 let mut start_positions = Vec::<StartPosition>::new();
3063 let mut outdent_positions = Vec::<Point>::new();
3064 while let Some(mat) = matches.peek() {
3065 let mut start: Option<Point> = None;
3066 let mut end: Option<Point> = None;
3067
3068 let config = indent_configs[mat.grammar_index];
3069 for capture in mat.captures {
3070 if capture.index == config.indent_capture_ix {
3071 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3072 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3073 } else if Some(capture.index) == config.start_capture_ix {
3074 start = Some(Point::from_ts_point(capture.node.end_position()));
3075 } else if Some(capture.index) == config.end_capture_ix {
3076 end = Some(Point::from_ts_point(capture.node.start_position()));
3077 } else if Some(capture.index) == config.outdent_capture_ix {
3078 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3079 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3080 start_positions.push(StartPosition {
3081 start: Point::from_ts_point(capture.node.start_position()),
3082 suffix: suffix.clone(),
3083 });
3084 }
3085 }
3086
3087 matches.advance();
3088 if let Some((start, end)) = start.zip(end) {
3089 if start.row == end.row {
3090 continue;
3091 }
3092 let range = start..end;
3093 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3094 Err(ix) => indent_ranges.insert(ix, range),
3095 Ok(ix) => {
3096 let prev_range = &mut indent_ranges[ix];
3097 prev_range.end = prev_range.end.max(range.end);
3098 }
3099 }
3100 }
3101 }
3102
3103 let mut error_ranges = Vec::<Range<Point>>::new();
3104 let mut matches = self
3105 .syntax
3106 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3107 while let Some(mat) = matches.peek() {
3108 let node = mat.captures[0].node;
3109 let start = Point::from_ts_point(node.start_position());
3110 let end = Point::from_ts_point(node.end_position());
3111 let range = start..end;
3112 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3113 Ok(ix) | Err(ix) => ix,
3114 };
3115 let mut end_ix = ix;
3116 while let Some(existing_range) = error_ranges.get(end_ix) {
3117 if existing_range.end < end {
3118 end_ix += 1;
3119 } else {
3120 break;
3121 }
3122 }
3123 error_ranges.splice(ix..end_ix, [range]);
3124 matches.advance();
3125 }
3126
3127 outdent_positions.sort();
3128 for outdent_position in outdent_positions {
3129 // find the innermost indent range containing this outdent_position
3130 // set its end to the outdent position
3131 if let Some(range_to_truncate) = indent_ranges
3132 .iter_mut()
3133 .filter(|indent_range| indent_range.contains(&outdent_position))
3134 .next_back()
3135 {
3136 range_to_truncate.end = outdent_position;
3137 }
3138 }
3139
3140 start_positions.sort_by_key(|b| b.start);
3141
3142        // Find the suggested indentation increases and decreases based on regexes.
3143 let mut regex_outdent_map = HashMap::default();
3144 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3145 let mut start_positions_iter = start_positions.iter().peekable();
3146
3147 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3148 self.for_each_line(
3149 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3150 ..Point::new(row_range.end, 0),
3151 |row, line| {
3152 if config
3153 .decrease_indent_pattern
3154 .as_ref()
3155 .is_some_and(|regex| regex.is_match(line))
3156 {
3157 indent_change_rows.push((row, Ordering::Less));
3158 }
3159 if config
3160 .increase_indent_pattern
3161 .as_ref()
3162 .is_some_and(|regex| regex.is_match(line))
3163 {
3164 indent_change_rows.push((row + 1, Ordering::Greater));
3165 }
3166 while let Some(pos) = start_positions_iter.peek() {
3167 if pos.start.row < row {
3168 let pos = start_positions_iter.next().unwrap();
3169 last_seen_suffix
3170 .entry(pos.suffix.to_string())
3171 .or_default()
3172 .push(pos.start);
3173 } else {
3174 break;
3175 }
3176 }
3177 for rule in &config.decrease_indent_patterns {
3178 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3179 let row_start_column = self.indent_size_for_line(row).len;
3180 let basis_row = rule
3181 .valid_after
3182 .iter()
3183 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3184 .flatten()
3185 .filter(|start_point| start_point.column <= row_start_column)
3186 .max_by_key(|start_point| start_point.row);
3187 if let Some(outdent_to_row) = basis_row {
3188 regex_outdent_map.insert(row, outdent_to_row.row);
3189 }
3190 break;
3191 }
3192 }
3193 },
3194 );
3195
3196 let mut indent_changes = indent_change_rows.into_iter().peekable();
3197 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3198 prev_non_blank_row.unwrap_or(0)
3199 } else {
3200 row_range.start.saturating_sub(1)
3201 };
3202
3203 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3204 Some(row_range.map(move |row| {
3205 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3206
3207 let mut indent_from_prev_row = false;
3208 let mut outdent_from_prev_row = false;
3209 let mut outdent_to_row = u32::MAX;
3210 let mut from_regex = false;
3211
3212 while let Some((indent_row, delta)) = indent_changes.peek() {
3213 match indent_row.cmp(&row) {
3214 Ordering::Equal => match delta {
3215 Ordering::Less => {
3216 from_regex = true;
3217 outdent_from_prev_row = true
3218 }
3219 Ordering::Greater => {
3220 indent_from_prev_row = true;
3221 from_regex = true
3222 }
3223 _ => {}
3224 },
3225
3226 Ordering::Greater => break,
3227 Ordering::Less => {}
3228 }
3229
3230 indent_changes.next();
3231 }
3232
3233 for range in &indent_ranges {
3234 if range.start.row >= row {
3235 break;
3236 }
3237 if range.start.row == prev_row && range.end > row_start {
3238 indent_from_prev_row = true;
3239 }
3240 if range.end > prev_row_start && range.end <= row_start {
3241 outdent_to_row = outdent_to_row.min(range.start.row);
3242 }
3243 }
3244
3245 if let Some(basis_row) = regex_outdent_map.get(&row) {
3246 indent_from_prev_row = false;
3247 outdent_to_row = *basis_row;
3248 from_regex = true;
3249 }
3250
3251 let within_error = error_ranges
3252 .iter()
3253 .any(|e| e.start.row < row && e.end > row_start);
3254
3255 let suggestion = if outdent_to_row == prev_row
3256 || (outdent_from_prev_row && indent_from_prev_row)
3257 {
3258 Some(IndentSuggestion {
3259 basis_row: prev_row,
3260 delta: Ordering::Equal,
3261 within_error: within_error && !from_regex,
3262 })
3263 } else if indent_from_prev_row {
3264 Some(IndentSuggestion {
3265 basis_row: prev_row,
3266 delta: Ordering::Greater,
3267 within_error: within_error && !from_regex,
3268 })
3269 } else if outdent_to_row < prev_row {
3270 Some(IndentSuggestion {
3271 basis_row: outdent_to_row,
3272 delta: Ordering::Equal,
3273 within_error: within_error && !from_regex,
3274 })
3275 } else if outdent_from_prev_row {
3276 Some(IndentSuggestion {
3277 basis_row: prev_row,
3278 delta: Ordering::Less,
3279 within_error: within_error && !from_regex,
3280 })
3281 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3282 {
3283 Some(IndentSuggestion {
3284 basis_row: prev_row,
3285 delta: Ordering::Equal,
3286 within_error: within_error && !from_regex,
3287 })
3288 } else {
3289 None
3290 };
3291
3292 prev_row = row;
3293 prev_row_start = row_start;
3294 suggestion
3295 }))
3296 }
3297
3298 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3299 while row > 0 {
3300 row -= 1;
3301 if !self.is_line_blank(row) {
3302 return Some(row);
3303 }
3304 }
3305 None
3306 }
3307
3308 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3309 let captures = self.syntax.captures(range, &self.text, |grammar| {
3310 grammar
3311 .highlights_config
3312 .as_ref()
3313 .map(|config| &config.query)
3314 });
3315 let highlight_maps = captures
3316 .grammars()
3317 .iter()
3318 .map(|grammar| grammar.highlight_map())
3319 .collect();
3320 (captures, highlight_maps)
3321 }
3322
3323 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3324 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3325 /// returned in chunks where each chunk has a single syntax highlighting style and
3326 /// diagnostic status.
3327 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3328 let range = range.start.to_offset(self)..range.end.to_offset(self);
3329
3330 let mut syntax = None;
3331 if language_aware {
3332 syntax = Some(self.get_highlights(range.clone()));
3333 }
3334 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3335 let diagnostics = language_aware;
3336 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3337 }
3338
3339 pub fn highlighted_text_for_range<T: ToOffset>(
3340 &self,
3341 range: Range<T>,
3342 override_style: Option<HighlightStyle>,
3343 syntax_theme: &SyntaxTheme,
3344 ) -> HighlightedText {
3345 HighlightedText::from_buffer_range(
3346 range,
3347 &self.text,
3348 &self.syntax,
3349 override_style,
3350 syntax_theme,
3351 )
3352 }
3353
3354 /// Invokes the given callback for each line of text in the given range of the buffer.
3355    /// Uses a callback to avoid allocating a string for each line.
3356 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3357 let mut line = String::new();
3358 let mut row = range.start.row;
3359 for chunk in self
3360 .as_rope()
3361 .chunks_in_range(range.to_offset(self))
3362 .chain(["\n"])
3363 {
3364 for (newline_ix, text) in chunk.split('\n').enumerate() {
3365 if newline_ix > 0 {
3366 callback(row, &line);
3367 row += 1;
3368 line.clear();
3369 }
3370 line.push_str(text);
3371 }
3372 }
3373 }
3374
3375 /// Iterates over every [`SyntaxLayer`] in the buffer.
3376 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3377 self.syntax_layers_for_range(0..self.len(), true)
3378 }
3379
3380 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3381 let offset = position.to_offset(self);
3382 self.syntax_layers_for_range(offset..offset, false)
3383 .filter(|l| l.node().end_byte() > offset)
3384 .last()
3385 }
3386
3387 pub fn syntax_layers_for_range<D: ToOffset>(
3388 &self,
3389 range: Range<D>,
3390 include_hidden: bool,
3391 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3392 self.syntax
3393 .layers_for_range(range, &self.text, include_hidden)
3394 }
3395
3396 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3397 &self,
3398 range: Range<D>,
3399 ) -> Option<SyntaxLayer<'_>> {
3400 let range = range.to_offset(self);
3401 self.syntax
3402 .layers_for_range(range, &self.text, false)
3403 .max_by(|a, b| {
3404 if a.depth != b.depth {
3405 a.depth.cmp(&b.depth)
3406 } else if a.offset.0 != b.offset.0 {
3407 a.offset.0.cmp(&b.offset.0)
3408 } else {
3409 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3410 }
3411 })
3412 }
3413
3414 /// Returns the main [`Language`].
3415 pub fn language(&self) -> Option<&Arc<Language>> {
3416 self.language.as_ref()
3417 }
3418
3419 /// Returns the [`Language`] at the given location.
3420 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3421 self.syntax_layer_at(position)
3422 .map(|info| info.language)
3423 .or(self.language.as_ref())
3424 }
3425
3426 /// Returns the settings for the language at the given location.
3427 pub fn settings_at<'a, D: ToOffset>(
3428 &'a self,
3429 position: D,
3430 cx: &'a App,
3431 ) -> Cow<'a, LanguageSettings> {
3432 language_settings(
3433 self.language_at(position).map(|l| l.name()),
3434 self.file.as_ref(),
3435 cx,
3436 )
3437 }
3438
3439 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3440 CharClassifier::new(self.language_scope_at(point))
3441 }
3442
3443 /// Returns the [`LanguageScope`] at the given location.
3444 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3445 let offset = position.to_offset(self);
3446 let mut scope = None;
3447 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3448
3449 // Use the layer that has the smallest node intersecting the given point.
3450 for layer in self
3451 .syntax
3452 .layers_for_range(offset..offset, &self.text, false)
3453 {
3454 let mut cursor = layer.node().walk();
3455
3456 let mut range = None;
3457 loop {
3458 let child_range = cursor.node().byte_range();
3459 if !child_range.contains(&offset) {
3460 break;
3461 }
3462
3463 range = Some(child_range);
3464 if cursor.goto_first_child_for_byte(offset).is_none() {
3465 break;
3466 }
3467 }
3468
3469 if let Some(range) = range
3470 && smallest_range_and_depth.as_ref().is_none_or(
3471 |(smallest_range, smallest_range_depth)| {
3472 if layer.depth > *smallest_range_depth {
3473 true
3474 } else if layer.depth == *smallest_range_depth {
3475 range.len() < smallest_range.len()
3476 } else {
3477 false
3478 }
3479 },
3480 )
3481 {
3482 smallest_range_and_depth = Some((range, layer.depth));
3483 scope = Some(LanguageScope {
3484 language: layer.language.clone(),
3485 override_id: layer.override_id(offset, &self.text),
3486 });
3487 }
3488 }
3489
3490 scope.or_else(|| {
3491 self.language.clone().map(|language| LanguageScope {
3492 language,
3493 override_id: None,
3494 })
3495 })
3496 }
3497
3498 /// Returns a tuple of the range and character kind of the word
3499 /// surrounding the given position.
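    ///
    /// Illustrative sketch, assuming `snapshot` is a `BufferSnapshot` whose text is
    /// `"let foo = 1;"` (not compiled here):
    ///
    /// ```ignore
    /// // Offset 5 falls inside "foo", so the word expands in both directions.
    /// let (range, kind) = snapshot.surrounding_word(5, None);
    /// assert_eq!(range, 4..7);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```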
3500 pub fn surrounding_word<T: ToOffset>(
3501 &self,
3502 start: T,
3503 scope_context: Option<CharScopeContext>,
3504 ) -> (Range<usize>, Option<CharKind>) {
3505 let mut start = start.to_offset(self);
3506 let mut end = start;
3507 let mut next_chars = self.chars_at(start).take(128).peekable();
3508 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3509
3510 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3511 let word_kind = cmp::max(
3512 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3513 next_chars.peek().copied().map(|c| classifier.kind(c)),
3514 );
3515
3516 for ch in prev_chars {
3517 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3518 start -= ch.len_utf8();
3519 } else {
3520 break;
3521 }
3522 }
3523
3524 for ch in next_chars {
3525 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3526 end += ch.len_utf8();
3527 } else {
3528 break;
3529 }
3530 }
3531
3532 (start..end, word_kind)
3533 }
3534
3535 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3536 /// range. When `require_larger` is true, the node found must be larger than the query range.
3537 ///
3538 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3539 /// be moved to the root of the tree.
3540 fn goto_node_enclosing_range(
3541 cursor: &mut tree_sitter::TreeCursor,
3542 query_range: &Range<usize>,
3543 require_larger: bool,
3544 ) -> bool {
3545 let mut ascending = false;
3546 loop {
3547 let mut range = cursor.node().byte_range();
3548 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3551 if range.start > query_range.start {
3552 cursor.goto_previous_sibling();
3553 range = cursor.node().byte_range();
3554 }
3555 } else {
3556 // When the query range is non-empty and the current node ends exactly at the start,
3557 // move to the next sibling to find a node that extends beyond the start.
3558 if range.end == query_range.start {
3559 cursor.goto_next_sibling();
3560 range = cursor.node().byte_range();
3561 }
3562 }
3563
3564 let encloses = range.contains_inclusive(query_range)
3565 && (!require_larger || range.len() > query_range.len());
3566 if !encloses {
3567 ascending = true;
3568 if !cursor.goto_parent() {
3569 return false;
3570 }
3571 continue;
3572 } else if ascending {
3573 return true;
3574 }
3575
3576 // Descend into the current node.
3577 if cursor
3578 .goto_first_child_for_byte(query_range.start)
3579 .is_none()
3580 {
3581 return true;
3582 }
3583 }
3584 }
3585
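    /// Returns the smallest syntax node, across all syntax layers, that encloses the
    /// given range and is strictly larger than it.
    ///
    /// Illustrative sketch, assuming a Rust buffer containing `fn main() { 1 + 2; }`
    /// (not compiled here):
    ///
    /// ```ignore
    /// // Selecting just `1` (bytes 12..13) expands to the expression `1 + 2`.
    /// let node = snapshot.syntax_ancestor(12..13).unwrap();
    /// assert_eq!(node.byte_range(), 12..17);
    /// ```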
3586 pub fn syntax_ancestor<'a, T: ToOffset>(
3587 &'a self,
3588 range: Range<T>,
3589 ) -> Option<tree_sitter::Node<'a>> {
3590 let range = range.start.to_offset(self)..range.end.to_offset(self);
3591 let mut result: Option<tree_sitter::Node<'a>> = None;
3592 for layer in self
3593 .syntax
3594 .layers_for_range(range.clone(), &self.text, true)
3595 {
3596 let mut cursor = layer.node().walk();
3597
3598 // Find the node that both contains the range and is larger than it.
3599 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3600 continue;
3601 }
3602
3603 let left_node = cursor.node();
3604 let mut layer_result = left_node;
3605
3606 // For an empty range, try to find another node immediately to the right of the range.
3607 if left_node.end_byte() == range.start {
3608 let mut right_node = None;
3609 while !cursor.goto_next_sibling() {
3610 if !cursor.goto_parent() {
3611 break;
3612 }
3613 }
3614
3615 while cursor.node().start_byte() == range.start {
3616 right_node = Some(cursor.node());
3617 if !cursor.goto_first_child() {
3618 break;
3619 }
3620 }
3621
3622 // If there is a candidate node on both sides of the (empty) range, then
3623 // decide between the two by favoring a named node over an anonymous token.
3624 // If both nodes are the same in that regard, favor the right one.
3625 if let Some(right_node) = right_node
3626 && (right_node.is_named() || !left_node.is_named())
3627 {
3628 layer_result = right_node;
3629 }
3630 }
3631
3632 if let Some(previous_result) = &result
3633 && previous_result.byte_range().len() < layer_result.byte_range().len()
3634 {
3635 continue;
3636 }
3637 result = Some(layer_result);
3638 }
3639
3640 result
3641 }
3642
3643 /// Find the previous sibling syntax node at the given range.
3644 ///
3645 /// This function locates the syntax node that precedes the node containing
3646 /// the given range. It searches hierarchically by:
3647 /// 1. Finding the node that contains the given range
3648 /// 2. Looking for the previous sibling at the same tree level
3649 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3650 ///
3651 /// Returns `None` if there is no previous sibling at any ancestor level.
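    ///
    /// Illustrative sketch, where `selection_range` is an assumed `Range<usize>`
    /// (not compiled here):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(selection_range) {
    ///     // `node` precedes the node containing `selection_range` (or one of its ancestors).
    ///     println!("previous sibling: {}", node.kind());
    /// }
    /// ```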
3652 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3653 &'a self,
3654 range: Range<T>,
3655 ) -> Option<tree_sitter::Node<'a>> {
3656 let range = range.start.to_offset(self)..range.end.to_offset(self);
3657 let mut result: Option<tree_sitter::Node<'a>> = None;
3658
3659 for layer in self
3660 .syntax
3661 .layers_for_range(range.clone(), &self.text, true)
3662 {
3663 let mut cursor = layer.node().walk();
3664
3665 // Find the node that contains the range
3666 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3667 continue;
3668 }
3669
3670 // Look for the previous sibling, moving up ancestor levels if needed
3671 loop {
3672 if cursor.goto_previous_sibling() {
3673 let layer_result = cursor.node();
3674
3675 if let Some(previous_result) = &result {
3676 if previous_result.byte_range().end < layer_result.byte_range().end {
3677 continue;
3678 }
3679 }
3680 result = Some(layer_result);
3681 break;
3682 }
3683
3684 // No sibling found at this level, try moving up to parent
3685 if !cursor.goto_parent() {
3686 break;
3687 }
3688 }
3689 }
3690
3691 result
3692 }
3693
3694 /// Find the next sibling syntax node at the given range.
3695 ///
3696 /// This function locates the syntax node that follows the node containing
3697 /// the given range. It searches hierarchically by:
3698 /// 1. Finding the node that contains the given range
3699 /// 2. Looking for the next sibling at the same tree level
3700 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3701 ///
3702 /// Returns `None` if there is no next sibling at any ancestor level.
3703 pub fn syntax_next_sibling<'a, T: ToOffset>(
3704 &'a self,
3705 range: Range<T>,
3706 ) -> Option<tree_sitter::Node<'a>> {
3707 let range = range.start.to_offset(self)..range.end.to_offset(self);
3708 let mut result: Option<tree_sitter::Node<'a>> = None;
3709
3710 for layer in self
3711 .syntax
3712 .layers_for_range(range.clone(), &self.text, true)
3713 {
3714 let mut cursor = layer.node().walk();
3715
3716 // Find the node that contains the range
3717 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3718 continue;
3719 }
3720
3721 // Look for the next sibling, moving up ancestor levels if needed
3722 loop {
3723 if cursor.goto_next_sibling() {
3724 let layer_result = cursor.node();
3725
3726 if let Some(previous_result) = &result {
3727 if previous_result.byte_range().start > layer_result.byte_range().start {
3728 continue;
3729 }
3730 }
3731 result = Some(layer_result);
3732 break;
3733 }
3734
3735 // No sibling found at this level, try moving up to parent
3736 if !cursor.goto_parent() {
3737 break;
3738 }
3739 }
3740 }
3741
3742 result
3743 }
3744
    /// Returns the root syntax node within the given row.
3746 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3747 let start_offset = position.to_offset(self);
3748
3749 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3750
3751 let layer = self
3752 .syntax
3753 .layers_for_range(start_offset..start_offset, &self.text, true)
3754 .next()?;
3755
3756 let mut cursor = layer.node().walk();
3757
3758 // Descend to the first leaf that touches the start of the range.
3759 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3760 if cursor.node().end_byte() == start_offset {
3761 cursor.goto_next_sibling();
3762 }
3763 }
3764
3765 // Ascend to the root node within the same row.
3766 while cursor.goto_parent() {
3767 if cursor.node().start_position().row != row {
3768 break;
3769 }
3770 }
3771
3772 Some(cursor.node())
3773 }
3774
3775 /// Returns the outline for the buffer.
3776 ///
3777 /// This method allows passing an optional [`SyntaxTheme`] to
3778 /// syntax-highlight the returned symbols.
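    ///
    /// A rough sketch of walking the result via [`Self::outline_items_containing`],
    /// which this method wraps (not compiled here):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```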
3779 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3780 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3781 }
3782
3783 /// Returns all the symbols that contain the given position.
3784 ///
3785 /// This method allows passing an optional [`SyntaxTheme`] to
3786 /// syntax-highlight the returned symbols.
3787 pub fn symbols_containing<T: ToOffset>(
3788 &self,
3789 position: T,
3790 theme: Option<&SyntaxTheme>,
3791 ) -> Vec<OutlineItem<Anchor>> {
3792 let position = position.to_offset(self);
3793 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3794 let end = self.clip_offset(position + 1, Bias::Right);
3795 let mut items = self.outline_items_containing(start..end, false, theme);
3796 let mut prev_depth = None;
3797 items.retain(|item| {
3798 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3799 prev_depth = Some(item.depth);
3800 result
3801 });
3802 items
3803 }
3804
3805 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3806 let range = range.to_offset(self);
3807 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3808 grammar.outline_config.as_ref().map(|c| &c.query)
3809 });
3810 let configs = matches
3811 .grammars()
3812 .iter()
3813 .map(|g| g.outline_config.as_ref().unwrap())
3814 .collect::<Vec<_>>();
3815
3816 while let Some(mat) = matches.peek() {
3817 let config = &configs[mat.grammar_index];
3818 let containing_item_node = maybe!({
3819 let item_node = mat.captures.iter().find_map(|cap| {
3820 if cap.index == config.item_capture_ix {
3821 Some(cap.node)
3822 } else {
3823 None
3824 }
3825 })?;
3826
3827 let item_byte_range = item_node.byte_range();
3828 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3829 None
3830 } else {
3831 Some(item_node)
3832 }
3833 });
3834
3835 if let Some(item_node) = containing_item_node {
3836 return Some(
3837 Point::from_ts_point(item_node.start_position())
3838 ..Point::from_ts_point(item_node.end_position()),
3839 );
3840 }
3841
3842 matches.advance();
3843 }
3844 None
3845 }
3846
3847 pub fn outline_items_containing<T: ToOffset>(
3848 &self,
3849 range: Range<T>,
3850 include_extra_context: bool,
3851 theme: Option<&SyntaxTheme>,
3852 ) -> Vec<OutlineItem<Anchor>> {
3853 self.outline_items_containing_internal(
3854 range,
3855 include_extra_context,
3856 theme,
3857 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3858 )
3859 }
3860
3861 pub fn outline_items_as_points_containing<T: ToOffset>(
3862 &self,
3863 range: Range<T>,
3864 include_extra_context: bool,
3865 theme: Option<&SyntaxTheme>,
3866 ) -> Vec<OutlineItem<Point>> {
3867 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3868 range
3869 })
3870 }
3871
3872 fn outline_items_containing_internal<T: ToOffset, U>(
3873 &self,
3874 range: Range<T>,
3875 include_extra_context: bool,
3876 theme: Option<&SyntaxTheme>,
3877 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3878 ) -> Vec<OutlineItem<U>> {
3879 let range = range.to_offset(self);
3880 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3881 grammar.outline_config.as_ref().map(|c| &c.query)
3882 });
3883
3884 let mut items = Vec::new();
3885 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3886 while let Some(mat) = matches.peek() {
3887 let config = matches.grammars()[mat.grammar_index]
3888 .outline_config
3889 .as_ref()
3890 .unwrap();
3891 if let Some(item) =
3892 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3893 {
3894 items.push(item);
3895 } else if let Some(capture) = mat
3896 .captures
3897 .iter()
3898 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3899 {
3900 let capture_range = capture.node.start_position()..capture.node.end_position();
3901 let mut capture_row_range =
3902 capture_range.start.row as u32..capture_range.end.row as u32;
3903 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3904 {
3905 capture_row_range.end -= 1;
3906 }
3907 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3908 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3909 last_row_range.end = capture_row_range.end;
3910 } else {
3911 annotation_row_ranges.push(capture_row_range);
3912 }
3913 } else {
3914 annotation_row_ranges.push(capture_row_range);
3915 }
3916 }
3917 matches.advance();
3918 }
3919
3920 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3921
        // Assign depths based on containment relationships and convert the ranges
        // using the provided callback.
3923 let mut item_ends_stack = Vec::<Point>::new();
3924 let mut anchor_items = Vec::new();
3925 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3926 for item in items {
3927 while let Some(last_end) = item_ends_stack.last().copied() {
3928 if last_end < item.range.end {
3929 item_ends_stack.pop();
3930 } else {
3931 break;
3932 }
3933 }
3934
3935 let mut annotation_row_range = None;
3936 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3937 let row_preceding_item = item.range.start.row.saturating_sub(1);
3938 if next_annotation_row_range.end < row_preceding_item {
3939 annotation_row_ranges.next();
3940 } else {
3941 if next_annotation_row_range.end == row_preceding_item {
3942 annotation_row_range = Some(next_annotation_row_range.clone());
3943 annotation_row_ranges.next();
3944 }
3945 break;
3946 }
3947 }
3948
3949 anchor_items.push(OutlineItem {
3950 depth: item_ends_stack.len(),
3951 range: range_callback(self, item.range.clone()),
3952 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3953 text: item.text,
3954 highlight_ranges: item.highlight_ranges,
3955 name_ranges: item.name_ranges,
3956 body_range: item.body_range.map(|r| range_callback(self, r)),
3957 annotation_range: annotation_row_range.map(|annotation_range| {
3958 let point_range = Point::new(annotation_range.start, 0)
3959 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3960 range_callback(self, point_range)
3961 }),
3962 });
3963 item_ends_stack.push(item.range.end);
3964 }
3965
3966 anchor_items
3967 }
3968
3969 fn next_outline_item(
3970 &self,
3971 config: &OutlineConfig,
3972 mat: &SyntaxMapMatch,
3973 range: &Range<usize>,
3974 include_extra_context: bool,
3975 theme: Option<&SyntaxTheme>,
3976 ) -> Option<OutlineItem<Point>> {
3977 let item_node = mat.captures.iter().find_map(|cap| {
3978 if cap.index == config.item_capture_ix {
3979 Some(cap.node)
3980 } else {
3981 None
3982 }
3983 })?;
3984
3985 let item_byte_range = item_node.byte_range();
3986 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3987 return None;
3988 }
3989 let item_point_range = Point::from_ts_point(item_node.start_position())
3990 ..Point::from_ts_point(item_node.end_position());
3991
3992 let mut open_point = None;
3993 let mut close_point = None;
3994
3995 let mut buffer_ranges = Vec::new();
3996 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3997 let mut range = node.start_byte()..node.end_byte();
3998 let start = node.start_position();
3999 if node.end_position().row > start.row {
4000 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4001 }
4002
4003 if !range.is_empty() {
4004 buffer_ranges.push((range, node_is_name));
4005 }
4006 };
4007
4008 for capture in mat.captures {
4009 if capture.index == config.name_capture_ix {
4010 add_to_buffer_ranges(capture.node, true);
4011 } else if Some(capture.index) == config.context_capture_ix
4012 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4013 {
4014 add_to_buffer_ranges(capture.node, false);
4015 } else {
4016 if Some(capture.index) == config.open_capture_ix {
4017 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4018 } else if Some(capture.index) == config.close_capture_ix {
4019 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4020 }
4021 }
4022 }
4023
4024 if buffer_ranges.is_empty() {
4025 return None;
4026 }
4027 let source_range_for_text =
4028 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4029
4030 let mut text = String::new();
4031 let mut highlight_ranges = Vec::new();
4032 let mut name_ranges = Vec::new();
4033 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4034 let mut last_buffer_range_end = 0;
4035 for (buffer_range, is_name) in buffer_ranges {
4036 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4037 if space_added {
4038 text.push(' ');
4039 }
4040 let before_append_len = text.len();
4041 let mut offset = buffer_range.start;
4042 chunks.seek(buffer_range.clone());
4043 for mut chunk in chunks.by_ref() {
4044 if chunk.text.len() > buffer_range.end - offset {
4045 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4046 offset = buffer_range.end;
4047 } else {
4048 offset += chunk.text.len();
4049 }
4050 let style = chunk
4051 .syntax_highlight_id
4052 .zip(theme)
4053 .and_then(|(highlight, theme)| highlight.style(theme));
4054 if let Some(style) = style {
4055 let start = text.len();
4056 let end = start + chunk.text.len();
4057 highlight_ranges.push((start..end, style));
4058 }
4059 text.push_str(chunk.text);
4060 if offset >= buffer_range.end {
4061 break;
4062 }
4063 }
4064 if is_name {
4065 let after_append_len = text.len();
4066 let start = if space_added && !name_ranges.is_empty() {
4067 before_append_len - 1
4068 } else {
4069 before_append_len
4070 };
4071 name_ranges.push(start..after_append_len);
4072 }
4073 last_buffer_range_end = buffer_range.end;
4074 }
4075
4076 Some(OutlineItem {
4077 depth: 0, // We'll calculate the depth later
4078 range: item_point_range,
4079 source_range_for_text: source_range_for_text.to_point(self),
4080 text,
4081 highlight_ranges,
4082 name_ranges,
4083 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4084 annotation_range: None,
4085 })
4086 }
4087
4088 pub fn function_body_fold_ranges<T: ToOffset>(
4089 &self,
4090 within: Range<T>,
4091 ) -> impl Iterator<Item = Range<usize>> + '_ {
4092 self.text_object_ranges(within, TreeSitterOptions::default())
4093 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4094 }
4095
4096 /// For each grammar in the language, runs the provided
4097 /// [`tree_sitter::Query`] against the given range.
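    ///
    /// Illustrative sketch, reusing the outline query as the accessor (not compiled here):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for the grammar at `mat.grammar_index`.
    ///     matches.advance();
    /// }
    /// ```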
4098 pub fn matches(
4099 &self,
4100 range: Range<usize>,
4101 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4102 ) -> SyntaxMapMatches<'_> {
4103 self.syntax.matches(range, self, query)
4104 }
4105
4106 pub fn all_bracket_ranges(
4107 &self,
4108 range: Range<usize>,
4109 ) -> impl Iterator<Item = BracketMatch> + '_ {
4110 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4111 grammar.brackets_config.as_ref().map(|c| &c.query)
4112 });
4113 let configs = matches
4114 .grammars()
4115 .iter()
4116 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4117 .collect::<Vec<_>>();
4118
4119 iter::from_fn(move || {
4120 while let Some(mat) = matches.peek() {
4121 let mut open = None;
4122 let mut close = None;
4123 let config = &configs[mat.grammar_index];
4124 let pattern = &config.patterns[mat.pattern_index];
4125 for capture in mat.captures {
4126 if capture.index == config.open_capture_ix {
4127 open = Some(capture.node.byte_range());
4128 } else if capture.index == config.close_capture_ix {
4129 close = Some(capture.node.byte_range());
4130 }
4131 }
4132
4133 matches.advance();
4134
4135 let Some((open_range, close_range)) = open.zip(close) else {
4136 continue;
4137 };
4138
4139 let bracket_range = open_range.start..=close_range.end;
4140 if !bracket_range.overlaps(&range) {
4141 continue;
4142 }
4143
4144 return Some(BracketMatch {
4145 open_range,
4146 close_range,
4147 newline_only: pattern.newline_only,
4148 });
4149 }
4150 None
4151 })
4152 }
4153
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4155 pub fn bracket_ranges<T: ToOffset>(
4156 &self,
4157 range: Range<T>,
4158 ) -> impl Iterator<Item = BracketMatch> + '_ {
4159 // Find bracket pairs that *inclusively* contain the given range.
4160 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4161 self.all_bracket_ranges(range)
4162 .filter(|pair| !pair.newline_only)
4163 }
4164
4165 pub fn debug_variables_query<T: ToOffset>(
4166 &self,
4167 range: Range<T>,
4168 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4169 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4170
4171 let mut matches = self.syntax.matches_with_options(
4172 range.clone(),
4173 &self.text,
4174 TreeSitterOptions::default(),
4175 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4176 );
4177
4178 let configs = matches
4179 .grammars()
4180 .iter()
4181 .map(|grammar| grammar.debug_variables_config.as_ref())
4182 .collect::<Vec<_>>();
4183
4184 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4185
4186 iter::from_fn(move || {
4187 loop {
4188 while let Some(capture) = captures.pop() {
4189 if capture.0.overlaps(&range) {
4190 return Some(capture);
4191 }
4192 }
4193
4194 let mat = matches.peek()?;
4195
4196 let Some(config) = configs[mat.grammar_index].as_ref() else {
4197 matches.advance();
4198 continue;
4199 };
4200
4201 for capture in mat.captures {
4202 let Some(ix) = config
4203 .objects_by_capture_ix
4204 .binary_search_by_key(&capture.index, |e| e.0)
4205 .ok()
4206 else {
4207 continue;
4208 };
4209 let text_object = config.objects_by_capture_ix[ix].1;
4210 let byte_range = capture.node.byte_range();
4211
4212 let mut found = false;
4213 for (range, existing) in captures.iter_mut() {
4214 if existing == &text_object {
4215 range.start = range.start.min(byte_range.start);
4216 range.end = range.end.max(byte_range.end);
4217 found = true;
4218 break;
4219 }
4220 }
4221
4222 if !found {
4223 captures.push((byte_range, text_object));
4224 }
4225 }
4226
4227 matches.advance();
4228 }
4229 })
4230 }
4231
4232 pub fn text_object_ranges<T: ToOffset>(
4233 &self,
4234 range: Range<T>,
4235 options: TreeSitterOptions,
4236 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4237 let range =
4238 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4239
4240 let mut matches =
4241 self.syntax
4242 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4243 grammar.text_object_config.as_ref().map(|c| &c.query)
4244 });
4245
4246 let configs = matches
4247 .grammars()
4248 .iter()
4249 .map(|grammar| grammar.text_object_config.as_ref())
4250 .collect::<Vec<_>>();
4251
4252 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4253
4254 iter::from_fn(move || {
4255 loop {
4256 while let Some(capture) = captures.pop() {
4257 if capture.0.overlaps(&range) {
4258 return Some(capture);
4259 }
4260 }
4261
4262 let mat = matches.peek()?;
4263
4264 let Some(config) = configs[mat.grammar_index].as_ref() else {
4265 matches.advance();
4266 continue;
4267 };
4268
4269 for capture in mat.captures {
4270 let Some(ix) = config
4271 .text_objects_by_capture_ix
4272 .binary_search_by_key(&capture.index, |e| e.0)
4273 .ok()
4274 else {
4275 continue;
4276 };
4277 let text_object = config.text_objects_by_capture_ix[ix].1;
4278 let byte_range = capture.node.byte_range();
4279
4280 let mut found = false;
4281 for (range, existing) in captures.iter_mut() {
4282 if existing == &text_object {
4283 range.start = range.start.min(byte_range.start);
4284 range.end = range.end.max(byte_range.end);
4285 found = true;
4286 break;
4287 }
4288 }
4289
4290 if !found {
4291 captures.push((byte_range, text_object));
4292 }
4293 }
4294
4295 matches.advance();
4296 }
4297 })
4298 }
4299
    /// Returns the enclosing bracket ranges that contain the given range.
4301 pub fn enclosing_bracket_ranges<T: ToOffset>(
4302 &self,
4303 range: Range<T>,
4304 ) -> impl Iterator<Item = BracketMatch> + '_ {
4305 let range = range.start.to_offset(self)..range.end.to_offset(self);
4306
4307 self.bracket_ranges(range.clone()).filter(move |pair| {
4308 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4309 })
4310 }
4311
    /// Returns the smallest pair of enclosing bracket ranges that contains the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
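    ///
    /// Illustrative sketch, assuming `snapshot` holds the text `"(a, [b])"` in a language
    /// whose bracket query pairs `()` and `[]` (not compiled here):
    ///
    /// ```ignore
    /// // The innermost pair around `b` (bytes 5..6) is the square brackets.
    /// let (open, close) = snapshot.innermost_enclosing_bracket_ranges(5..6, None).unwrap();
    /// assert_eq!((open, close), (4..5, 6..7));
    /// ```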
4315 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4316 &self,
4317 range: Range<T>,
4318 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4319 ) -> Option<(Range<usize>, Range<usize>)> {
4320 let range = range.start.to_offset(self)..range.end.to_offset(self);
4321
4322 // Get the ranges of the innermost pair of brackets.
4323 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4324
4325 for pair in self.enclosing_bracket_ranges(range) {
4326 if let Some(range_filter) = range_filter
4327 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4328 {
4329 continue;
4330 }
4331
4332 let len = pair.close_range.end - pair.open_range.start;
4333
4334 if let Some((existing_open, existing_close)) = &result {
4335 let existing_len = existing_close.end - existing_open.start;
4336 if len > existing_len {
4337 continue;
4338 }
4339 }
4340
4341 result = Some((pair.open_range, pair.close_range));
4342 }
4343
4344 result
4345 }
4346
4347 /// Returns anchor ranges for any matches of the redaction query.
4348 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4349 /// will be run on the relevant section of the buffer.
4350 pub fn redacted_ranges<T: ToOffset>(
4351 &self,
4352 range: Range<T>,
4353 ) -> impl Iterator<Item = Range<usize>> + '_ {
4354 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4355 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4356 grammar
4357 .redactions_config
4358 .as_ref()
4359 .map(|config| &config.query)
4360 });
4361
4362 let configs = syntax_matches
4363 .grammars()
4364 .iter()
4365 .map(|grammar| grammar.redactions_config.as_ref())
4366 .collect::<Vec<_>>();
4367
4368 iter::from_fn(move || {
4369 let redacted_range = syntax_matches
4370 .peek()
4371 .and_then(|mat| {
4372 configs[mat.grammar_index].and_then(|config| {
4373 mat.captures
4374 .iter()
4375 .find(|capture| capture.index == config.redaction_capture_ix)
4376 })
4377 })
4378 .map(|mat| mat.node.byte_range());
4379 syntax_matches.advance();
4380 redacted_range
4381 })
4382 }
4383
4384 pub fn injections_intersecting_range<T: ToOffset>(
4385 &self,
4386 range: Range<T>,
4387 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4388 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4389
4390 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4391 grammar
4392 .injection_config
4393 .as_ref()
4394 .map(|config| &config.query)
4395 });
4396
4397 let configs = syntax_matches
4398 .grammars()
4399 .iter()
4400 .map(|grammar| grammar.injection_config.as_ref())
4401 .collect::<Vec<_>>();
4402
4403 iter::from_fn(move || {
4404 let ranges = syntax_matches.peek().and_then(|mat| {
4405 let config = &configs[mat.grammar_index]?;
4406 let content_capture_range = mat.captures.iter().find_map(|capture| {
4407 if capture.index == config.content_capture_ix {
4408 Some(capture.node.byte_range())
4409 } else {
4410 None
4411 }
4412 })?;
4413 let language = self.language_at(content_capture_range.start)?;
4414 Some((content_capture_range, language))
4415 });
4416 syntax_matches.advance();
4417 ranges
4418 })
4419 }
4420
4421 pub fn runnable_ranges(
4422 &self,
4423 offset_range: Range<usize>,
4424 ) -> impl Iterator<Item = RunnableRange> + '_ {
4425 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4426 grammar.runnable_config.as_ref().map(|config| &config.query)
4427 });
4428
4429 let test_configs = syntax_matches
4430 .grammars()
4431 .iter()
4432 .map(|grammar| grammar.runnable_config.as_ref())
4433 .collect::<Vec<_>>();
4434
4435 iter::from_fn(move || {
4436 loop {
4437 let mat = syntax_matches.peek()?;
4438
4439 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4440 let mut run_range = None;
4441 let full_range = mat.captures.iter().fold(
4442 Range {
4443 start: usize::MAX,
4444 end: 0,
4445 },
4446 |mut acc, next| {
4447 let byte_range = next.node.byte_range();
4448 if acc.start > byte_range.start {
4449 acc.start = byte_range.start;
4450 }
4451 if acc.end < byte_range.end {
4452 acc.end = byte_range.end;
4453 }
4454 acc
4455 },
4456 );
4457 if full_range.start > full_range.end {
4458 // We did not find a full spanning range of this match.
4459 return None;
4460 }
4461 let extra_captures: SmallVec<[_; 1]> =
4462 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4463 test_configs
4464 .extra_captures
4465 .get(capture.index as usize)
4466 .cloned()
4467 .and_then(|tag_name| match tag_name {
4468 RunnableCapture::Named(name) => {
4469 Some((capture.node.byte_range(), name))
4470 }
4471 RunnableCapture::Run => {
4472 let _ = run_range.insert(capture.node.byte_range());
4473 None
4474 }
4475 })
4476 }));
4477 let run_range = run_range?;
4478 let tags = test_configs
4479 .query
4480 .property_settings(mat.pattern_index)
4481 .iter()
4482 .filter_map(|property| {
4483 if *property.key == *"tag" {
4484 property
4485 .value
4486 .as_ref()
4487 .map(|value| RunnableTag(value.to_string().into()))
4488 } else {
4489 None
4490 }
4491 })
4492 .collect();
4493 let extra_captures = extra_captures
4494 .into_iter()
4495 .map(|(range, name)| {
4496 (
4497 name.to_string(),
4498 self.text_for_range(range).collect::<String>(),
4499 )
4500 })
4501 .collect();
4502 // All tags should have the same range.
4503 Some(RunnableRange {
4504 run_range,
4505 full_range,
4506 runnable: Runnable {
4507 tags,
4508 language: mat.language,
4509 buffer: self.remote_id(),
4510 },
4511 extra_captures,
4512 buffer_id: self.remote_id(),
4513 })
4514 });
4515
4516 syntax_matches.advance();
4517 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But when a match
                    // did not contain a run marker, we don't want to end this iterator early, so we
                    // just loop around to the next match.
4520 return test_range;
4521 }
4522 }
4523 })
4524 }
4525
4526 /// Returns selections for remote peers intersecting the given range.
4527 #[allow(clippy::type_complexity)]
4528 pub fn selections_in_range(
4529 &self,
4530 range: Range<Anchor>,
4531 include_local: bool,
4532 ) -> impl Iterator<
4533 Item = (
4534 ReplicaId,
4535 bool,
4536 CursorShape,
4537 impl Iterator<Item = &Selection<Anchor>> + '_,
4538 ),
4539 > + '_ {
4540 self.remote_selections
4541 .iter()
4542 .filter(move |(replica_id, set)| {
4543 (include_local || **replica_id != self.text.replica_id())
4544 && !set.selections.is_empty()
4545 })
4546 .map(move |(replica_id, set)| {
4547 let start_ix = match set.selections.binary_search_by(|probe| {
4548 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4549 }) {
4550 Ok(ix) | Err(ix) => ix,
4551 };
4552 let end_ix = match set.selections.binary_search_by(|probe| {
4553 probe.start.cmp(&range.end, self).then(Ordering::Less)
4554 }) {
4555 Ok(ix) | Err(ix) => ix,
4556 };
4557
4558 (
4559 *replica_id,
4560 set.line_mode,
4561 set.cursor_shape,
4562 set.selections[start_ix..end_ix].iter(),
4563 )
4564 })
4565 }
4566
    /// Returns whether the buffer contains any diagnostics.
4568 pub fn has_diagnostics(&self) -> bool {
4569 !self.diagnostics.is_empty()
4570 }
4571
4572 /// Returns all the diagnostics intersecting the given range.
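    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```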
4573 pub fn diagnostics_in_range<'a, T, O>(
4574 &'a self,
4575 search_range: Range<T>,
4576 reversed: bool,
4577 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4578 where
4579 T: 'a + Clone + ToOffset,
4580 O: 'a + FromAnchor,
4581 {
4582 let mut iterators: Vec<_> = self
4583 .diagnostics
4584 .iter()
4585 .map(|(_, collection)| {
4586 collection
4587 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4588 .peekable()
4589 })
4590 .collect();
4591
4592 std::iter::from_fn(move || {
4593 let (next_ix, _) = iterators
4594 .iter_mut()
4595 .enumerate()
4596 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4597 .min_by(|(_, a), (_, b)| {
4598 let cmp = a
4599 .range
4600 .start
4601 .cmp(&b.range.start, self)
4602 // when range is equal, sort by diagnostic severity
4603 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4604 // and stabilize order with group_id
4605 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4606 if reversed { cmp.reverse() } else { cmp }
4607 })?;
4608 iterators[next_ix]
4609 .next()
4610 .map(
4611 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4612 diagnostic,
4613 range: FromAnchor::from_anchor(&range.start, self)
4614 ..FromAnchor::from_anchor(&range.end, self),
4615 },
4616 )
4617 })
4618 }
4619
4620 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4621 /// should be used instead.
4622 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4623 &self.diagnostics
4624 }
4625
4626 /// Returns all the diagnostic groups associated with the given
4627 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4629 pub fn diagnostic_groups(
4630 &self,
4631 language_server_id: Option<LanguageServerId>,
4632 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4633 let mut groups = Vec::new();
4634
4635 if let Some(language_server_id) = language_server_id {
4636 if let Ok(ix) = self
4637 .diagnostics
4638 .binary_search_by_key(&language_server_id, |e| e.0)
4639 {
4640 self.diagnostics[ix]
4641 .1
4642 .groups(language_server_id, &mut groups, self);
4643 }
4644 } else {
4645 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4646 diagnostics.groups(*language_server_id, &mut groups, self);
4647 }
4648 }
4649
4650 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4651 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4652 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4653 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4654 });
4655
4656 groups
4657 }
4658
4659 /// Returns an iterator over the diagnostics for the given group.
4660 pub fn diagnostic_group<O>(
4661 &self,
4662 group_id: usize,
4663 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4664 where
4665 O: FromAnchor + 'static,
4666 {
4667 self.diagnostics
4668 .iter()
4669 .flat_map(move |(_, set)| set.group(group_id, self))
4670 }
4671
4672 /// An integer version number that accounts for all updates besides
4673 /// the buffer's text itself (which is versioned via a version vector).
4674 pub fn non_text_state_update_count(&self) -> usize {
4675 self.non_text_state_update_count
4676 }
4677
4678 /// An integer version that changes when the buffer's syntax changes.
4679 pub fn syntax_update_count(&self) -> usize {
4680 self.syntax.update_count()
4681 }
4682
    /// Returns a snapshot of the underlying file.
4684 pub fn file(&self) -> Option<&Arc<dyn File>> {
4685 self.file.as_ref()
4686 }
4687
4688 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4689 if let Some(file) = self.file() {
4690 if file.path().file_name().is_none() || include_root {
4691 Some(file.full_path(cx).to_string_lossy().into_owned())
4692 } else {
4693 Some(file.path().display(file.path_style(cx)).to_string())
4694 }
4695 } else {
4696 None
4697 }
4698 }
4699
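    /// Collects the distinct words in the given range, keyed by their text, optionally
    /// filtered by a fuzzy query (see [`WordsQuery`]).
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```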
4700 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4701 let query_str = query.fuzzy_contents;
4702 if query_str.is_some_and(|query| query.is_empty()) {
4703 return BTreeMap::default();
4704 }
4705
4706 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4707 language,
4708 override_id: None,
4709 }));
4710
4711 let mut query_ix = 0;
4712 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4713 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4714
4715 let mut words = BTreeMap::default();
4716 let mut current_word_start_ix = None;
4717 let mut chunk_ix = query.range.start;
4718 for chunk in self.chunks(query.range, false) {
4719 for (i, c) in chunk.text.char_indices() {
4720 let ix = chunk_ix + i;
4721 if classifier.is_word(c) {
4722 if current_word_start_ix.is_none() {
4723 current_word_start_ix = Some(ix);
4724 }
4725
4726 if let Some(query_chars) = &query_chars
4727 && query_ix < query_len
4728 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4729 {
4730 query_ix += 1;
4731 }
4732 continue;
4733 } else if let Some(word_start) = current_word_start_ix.take()
4734 && query_ix == query_len
4735 {
4736 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4737 let mut word_text = self.text_for_range(word_start..ix).peekable();
4738 let first_char = word_text
4739 .peek()
4740 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start
                    // with a digit when `skip_digits` is set.
4742 if !query.skip_digits
4743 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4744 {
4745 words.insert(word_text.collect(), word_range);
4746 }
4747 }
4748 query_ix = 0;
4749 }
4750 chunk_ix += chunk.text.len();
4751 }
4752
4753 words
4754 }
4755}
4756
4757pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of this string,
    /// in order (matched case-insensitively).
4759 pub fuzzy_contents: Option<&'a str>,
4760 /// Skips words that start with a digit.
4761 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4763 pub range: Range<usize>,
4764}
4765
4766fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4767 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4768}
4769
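/// Computes the indentation of the given character stream, counting leading spaces and
/// tabs and reporting the kind of the first indent character.
///
/// Illustrative sketch (not compiled here):
///
/// ```ignore
/// // Two spaces followed by a tab: three indent characters, reported as spaces
/// // because the first indent character is a space.
/// let indent = indent_size_for_text("  \tlet x = 1;".chars());
/// assert_eq!((indent.len, indent.kind), (3, IndentKind::Space));
/// ```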
4770fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4771 let mut result = IndentSize::spaces(0);
4772 for c in text {
4773 let kind = match c {
4774 ' ' => IndentKind::Space,
4775 '\t' => IndentKind::Tab,
4776 _ => break,
4777 };
4778 if result.len == 0 {
4779 result.kind = kind;
4780 }
4781 result.len += 1;
4782 }
4783 result
4784}
4785
4786impl Clone for BufferSnapshot {
4787 fn clone(&self) -> Self {
4788 Self {
4789 text: self.text.clone(),
4790 syntax: self.syntax.clone(),
4791 file: self.file.clone(),
4792 remote_selections: self.remote_selections.clone(),
4793 diagnostics: self.diagnostics.clone(),
4794 language: self.language.clone(),
4795 non_text_state_update_count: self.non_text_state_update_count,
4796 }
4797 }
4798}
4799
4800impl Deref for BufferSnapshot {
4801 type Target = text::BufferSnapshot;
4802
4803 fn deref(&self) -> &Self::Target {
4804 &self.text
4805 }
4806}
4807
4808unsafe impl Send for BufferChunks<'_> {}
4809
4810impl<'a> BufferChunks<'a> {
4811 pub(crate) fn new(
4812 text: &'a Rope,
4813 range: Range<usize>,
4814 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4815 diagnostics: bool,
4816 buffer_snapshot: Option<&'a BufferSnapshot>,
4817 ) -> Self {
4818 let mut highlights = None;
4819 if let Some((captures, highlight_maps)) = syntax {
4820 highlights = Some(BufferChunkHighlights {
4821 captures,
4822 next_capture: None,
4823 stack: Default::default(),
4824 highlight_maps,
4825 })
4826 }
4827
4828 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4829 let chunks = text.chunks_in_range(range.clone());
4830
4831 let mut this = BufferChunks {
4832 range,
4833 buffer_snapshot,
4834 chunks,
4835 diagnostic_endpoints,
4836 error_depth: 0,
4837 warning_depth: 0,
4838 information_depth: 0,
4839 hint_depth: 0,
4840 unnecessary_depth: 0,
4841 underline: true,
4842 highlights,
4843 };
4844 this.initialize_diagnostic_endpoints();
4845 this
4846 }
4847
    /// Seeks to the given byte range in the buffer.
4849 pub fn seek(&mut self, range: Range<usize>) {
4850 let old_range = std::mem::replace(&mut self.range, range.clone());
4851 self.chunks.set_range(self.range.clone());
4852 if let Some(highlights) = self.highlights.as_mut() {
4853 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4854 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4855 highlights
4856 .stack
4857 .retain(|(end_offset, _)| *end_offset > range.start);
4858 if let Some(capture) = &highlights.next_capture
4859 && range.start >= capture.node.start_byte()
4860 {
4861 let next_capture_end = capture.node.end_byte();
4862 if range.start < next_capture_end {
4863 highlights.stack.push((
4864 next_capture_end,
4865 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4866 ));
4867 }
4868 highlights.next_capture.take();
4869 }
4870 } else if let Some(snapshot) = self.buffer_snapshot {
4871 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4872 *highlights = BufferChunkHighlights {
4873 captures,
4874 next_capture: None,
4875 stack: Default::default(),
4876 highlight_maps,
4877 };
4878 } else {
4879 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4880 // Seeking such BufferChunks is not supported.
4881 debug_assert!(
4882 false,
4883 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4884 );
4885 }
4886
4887 highlights.captures.set_byte_range(self.range.clone());
4888 self.initialize_diagnostic_endpoints();
4889 }
4890 }
4891
4892 fn initialize_diagnostic_endpoints(&mut self) {
4893 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4894 && let Some(buffer) = self.buffer_snapshot
4895 {
4896 let mut diagnostic_endpoints = Vec::new();
4897 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4898 diagnostic_endpoints.push(DiagnosticEndpoint {
4899 offset: entry.range.start,
4900 is_start: true,
4901 severity: entry.diagnostic.severity,
4902 is_unnecessary: entry.diagnostic.is_unnecessary,
4903 underline: entry.diagnostic.underline,
4904 });
4905 diagnostic_endpoints.push(DiagnosticEndpoint {
4906 offset: entry.range.end,
4907 is_start: false,
4908 severity: entry.diagnostic.severity,
4909 is_unnecessary: entry.diagnostic.is_unnecessary,
4910 underline: entry.diagnostic.underline,
4911 });
4912 }
4913 diagnostic_endpoints
4914 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4915 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4916 self.hint_depth = 0;
4917 self.error_depth = 0;
4918 self.warning_depth = 0;
4919 self.information_depth = 0;
4920 }
4921 }
4922
4923 /// The current byte offset in the buffer.
4924 pub fn offset(&self) -> usize {
4925 self.range.start
4926 }
4927
4928 pub fn range(&self) -> Range<usize> {
4929 self.range.clone()
4930 }
4931
4932 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4933 let depth = match endpoint.severity {
4934 DiagnosticSeverity::ERROR => &mut self.error_depth,
4935 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4936 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4937 DiagnosticSeverity::HINT => &mut self.hint_depth,
4938 _ => return,
4939 };
4940 if endpoint.is_start {
4941 *depth += 1;
4942 } else {
4943 *depth -= 1;
4944 }
4945
4946 if endpoint.is_unnecessary {
4947 if endpoint.is_start {
4948 self.unnecessary_depth += 1;
4949 } else {
4950 self.unnecessary_depth -= 1;
4951 }
4952 }
4953 }
4954
4955 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4956 if self.error_depth > 0 {
4957 Some(DiagnosticSeverity::ERROR)
4958 } else if self.warning_depth > 0 {
4959 Some(DiagnosticSeverity::WARNING)
4960 } else if self.information_depth > 0 {
4961 Some(DiagnosticSeverity::INFORMATION)
4962 } else if self.hint_depth > 0 {
4963 Some(DiagnosticSeverity::HINT)
4964 } else {
4965 None
4966 }
4967 }
4968
4969 fn current_code_is_unnecessary(&self) -> bool {
4970 self.unnecessary_depth > 0
4971 }
4972}
4973
4974impl<'a> Iterator for BufferChunks<'a> {
4975 type Item = Chunk<'a>;
4976
4977 fn next(&mut self) -> Option<Self::Item> {
4978 let mut next_capture_start = usize::MAX;
4979 let mut next_diagnostic_endpoint = usize::MAX;
4980
4981 if let Some(highlights) = self.highlights.as_mut() {
4982 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4983 if *parent_capture_end <= self.range.start {
4984 highlights.stack.pop();
4985 } else {
4986 break;
4987 }
4988 }
4989
4990 if highlights.next_capture.is_none() {
4991 highlights.next_capture = highlights.captures.next();
4992 }
4993
4994 while let Some(capture) = highlights.next_capture.as_ref() {
4995 if self.range.start < capture.node.start_byte() {
4996 next_capture_start = capture.node.start_byte();
4997 break;
4998 } else {
4999 let highlight_id =
5000 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5001 highlights
5002 .stack
5003 .push((capture.node.end_byte(), highlight_id));
5004 highlights.next_capture = highlights.captures.next();
5005 }
5006 }
5007 }
5008
5009 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5010 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5011 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5012 if endpoint.offset <= self.range.start {
5013 self.update_diagnostic_depths(endpoint);
5014 diagnostic_endpoints.next();
5015 self.underline = endpoint.underline;
5016 } else {
5017 next_diagnostic_endpoint = endpoint.offset;
5018 break;
5019 }
5020 }
5021 }
5022 self.diagnostic_endpoints = diagnostic_endpoints;
5023
5024 if let Some(ChunkBitmaps {
5025 text: chunk,
5026 chars: chars_map,
5027 tabs,
5028 }) = self.chunks.peek_with_bitmaps()
5029 {
5030 let chunk_start = self.range.start;
5031 let mut chunk_end = (self.chunks.offset() + chunk.len())
5032 .min(next_capture_start)
5033 .min(next_diagnostic_endpoint);
5034 let mut highlight_id = None;
5035 if let Some(highlights) = self.highlights.as_ref()
5036 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5037 {
5038 chunk_end = chunk_end.min(*parent_capture_end);
5039 highlight_id = Some(*parent_highlight_id);
5040 }
5041 let bit_start = chunk_start - self.chunks.offset();
5042 let bit_end = chunk_end - self.chunks.offset();
5043
5044 let slice = &chunk[bit_start..bit_end];
5045
5046 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5047 let tabs = (tabs >> bit_start) & mask;
5048 let chars = (chars_map >> bit_start) & mask;
5049
5050 self.range.start = chunk_end;
5051 if self.range.start == self.chunks.offset() + chunk.len() {
5052 self.chunks.next().unwrap();
5053 }
5054
5055 Some(Chunk {
5056 text: slice,
5057 syntax_highlight_id: highlight_id,
5058 underline: self.underline,
5059 diagnostic_severity: self.current_diagnostic_severity(),
5060 is_unnecessary: self.current_code_is_unnecessary(),
5061 tabs,
5062 chars,
5063 ..Chunk::default()
5064 })
5065 } else {
5066 None
5067 }
5068 }
5069}
5070
5071impl operation_queue::Operation for Operation {
5072 fn lamport_timestamp(&self) -> clock::Lamport {
5073 match self {
5074 Operation::Buffer(_) => {
5075 unreachable!("buffer operations should never be deferred at this layer")
5076 }
5077 Operation::UpdateDiagnostics {
5078 lamport_timestamp, ..
5079 }
5080 | Operation::UpdateSelections {
5081 lamport_timestamp, ..
5082 }
5083 | Operation::UpdateCompletionTriggers {
5084 lamport_timestamp, ..
5085 }
5086 | Operation::UpdateLineEnding {
5087 lamport_timestamp, ..
5088 } => *lamport_timestamp,
5089 }
5090 }
5091}
5092
5093impl Default for Diagnostic {
5094 fn default() -> Self {
5095 Self {
5096 source: Default::default(),
5097 source_kind: DiagnosticSourceKind::Other,
5098 code: None,
5099 code_description: None,
5100 severity: DiagnosticSeverity::ERROR,
5101 message: Default::default(),
5102 markdown: None,
5103 group_id: 0,
5104 is_primary: false,
5105 is_disk_based: false,
5106 is_unnecessary: false,
5107 underline: true,
5108 data: None,
5109 }
5110 }
5111}
5112
5113impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5115 pub fn spaces(len: u32) -> Self {
5116 Self {
5117 len,
5118 kind: IndentKind::Space,
5119 }
5120 }
5121
5122 /// Returns an [`IndentSize`] representing a tab.
5123 pub fn tab() -> Self {
5124 Self {
5125 len: 1,
5126 kind: IndentKind::Tab,
5127 }
5128 }
5129
5130 /// An iterator over the characters represented by this [`IndentSize`].
5131 pub fn chars(&self) -> impl Iterator<Item = char> {
5132 iter::repeat(self.char()).take(self.len as usize)
5133 }
5134
5135 /// The character representation of this [`IndentSize`].
5136 pub fn char(&self) -> char {
5137 match self.kind {
5138 IndentKind::Space => ' ',
5139 IndentKind::Tab => '\t',
5140 }
5141 }
5142
5143 /// Consumes the current [`IndentSize`] and returns a new one that has
5144 /// been shrunk or enlarged by the given size along the given direction.
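    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// // Growing a four-space indent by two spaces yields six spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// // Shrinking by a tab leaves it unchanged, because the kinds differ.
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
    /// ```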
5145 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5146 match direction {
5147 Ordering::Less => {
5148 if self.kind == size.kind && self.len >= size.len {
5149 self.len -= size.len;
5150 }
5151 }
5152 Ordering::Equal => {}
5153 Ordering::Greater => {
5154 if self.len == 0 {
5155 self = size;
5156 } else if self.kind == size.kind {
5157 self.len += size.len;
5158 }
5159 }
5160 }
5161 self
5162 }
5163
5164 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5165 match self.kind {
5166 IndentKind::Space => self.len as usize,
5167 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5168 }
5169 }
5170}
5171
5172#[cfg(any(test, feature = "test-support"))]
5173pub struct TestFile {
5174 pub path: Arc<RelPath>,
5175 pub root_name: String,
5176 pub local_root: Option<PathBuf>,
5177}
5178
5179#[cfg(any(test, feature = "test-support"))]
5180impl File for TestFile {
5181 fn path(&self) -> &Arc<RelPath> {
5182 &self.path
5183 }
5184
5185 fn full_path(&self, _: &gpui::App) -> PathBuf {
5186 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5187 }
5188
5189 fn as_local(&self) -> Option<&dyn LocalFile> {
5190 if self.local_root.is_some() {
5191 Some(self)
5192 } else {
5193 None
5194 }
5195 }
5196
5197 fn disk_state(&self) -> DiskState {
5198 unimplemented!()
5199 }
5200
5201 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5202 self.path().file_name().unwrap_or(self.root_name.as_ref())
5203 }
5204
5205 fn worktree_id(&self, _: &App) -> WorktreeId {
5206 WorktreeId::from_usize(0)
5207 }
5208
5209 fn to_proto(&self, _: &App) -> rpc::proto::File {
5210 unimplemented!()
5211 }
5212
5213 fn is_private(&self) -> bool {
5214 false
5215 }
5216
5217 fn path_style(&self, _cx: &App) -> PathStyle {
5218 PathStyle::local()
5219 }
5220}
5221
5222#[cfg(any(test, feature = "test-support"))]
5223impl LocalFile for TestFile {
5224 fn abs_path(&self, _cx: &App) -> PathBuf {
5225 PathBuf::from(self.local_root.as_ref().unwrap())
5226 .join(&self.root_name)
5227 .join(self.path.as_std_path())
5228 }
5229
5230 fn load(
5231 &self,
5232 _cx: &App,
5233 _encoding: EncodingWrapper,
5234 _detect_utf16: bool,
5235 ) -> Task<Result<String>> {
5236 unimplemented!()
5237 }
5238
5239 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5240 unimplemented!()
5241 }
5242}
5243
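/// Groups an increasing sequence of row numbers into contiguous ranges, splitting any
/// range that would exceed `max_len` rows.
///
/// Illustrative sketch (not compiled here):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```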
5244pub(crate) fn contiguous_ranges(
5245 values: impl Iterator<Item = u32>,
5246 max_len: usize,
5247) -> impl Iterator<Item = Range<u32>> {
5248 let mut values = values;
5249 let mut current_range: Option<Range<u32>> = None;
5250 std::iter::from_fn(move || {
5251 loop {
5252 if let Some(value) = values.next() {
5253 if let Some(range) = &mut current_range
5254 && value == range.end
5255 && range.len() < max_len
5256 {
5257 range.end += 1;
5258 continue;
5259 }
5260
5261 let prev_range = current_range.clone();
5262 current_range = Some(value..(value + 1));
5263 if prev_range.is_some() {
5264 return prev_range;
5265 }
5266 } else {
5267 return current_range.take();
5268 }
5269 }
5270 })
5271}
5272
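/// Classifies characters as words, whitespace, or punctuation, consulting the word
/// characters (or related character sets) of an optional [`LanguageScope`].
///
/// Rough sketch of the default behavior, with no language scope (not compiled here):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// // With `ignore_punctuation`, punctuation is treated as part of a word.
/// assert_eq!(classifier.ignore_punctuation(true).kind('.'), CharKind::Word);
/// ```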
5273#[derive(Default, Debug)]
5274pub struct CharClassifier {
5275 scope: Option<LanguageScope>,
5276 scope_context: Option<CharScopeContext>,
5277 ignore_punctuation: bool,
5278}
5279
5280impl CharClassifier {
5281 pub fn new(scope: Option<LanguageScope>) -> Self {
5282 Self {
5283 scope,
5284 scope_context: None,
5285 ignore_punctuation: false,
5286 }
5287 }
5288
5289 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5290 Self {
5291 scope_context,
5292 ..self
5293 }
5294 }
5295
5296 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5297 Self {
5298 ignore_punctuation,
5299 ..self
5300 }
5301 }
5302
5303 pub fn is_whitespace(&self, c: char) -> bool {
5304 self.kind(c) == CharKind::Whitespace
5305 }
5306
5307 pub fn is_word(&self, c: char) -> bool {
5308 self.kind(c) == CharKind::Word
5309 }
5310
5311 pub fn is_punctuation(&self, c: char) -> bool {
5312 self.kind(c) == CharKind::Punctuation
5313 }
5314
5315 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5316 if c.is_alphanumeric() || c == '_' {
5317 return CharKind::Word;
5318 }
5319
5320 if let Some(scope) = &self.scope {
5321 let characters = match self.scope_context {
5322 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5323 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5324 None => scope.word_characters(),
5325 };
5326 if let Some(characters) = characters
5327 && characters.contains(&c)
5328 {
5329 return CharKind::Word;
5330 }
5331 }
5332
5333 if c.is_whitespace() {
5334 return CharKind::Whitespace;
5335 }
5336
5337 if ignore_punctuation {
5338 CharKind::Word
5339 } else {
5340 CharKind::Punctuation
5341 }
5342 }
5343
5344 pub fn kind(&self, c: char) -> CharKind {
5345 self.kind_with(c, self.ignore_punctuation)
5346 }
5347}
5348
5349/// Find all of the ranges of whitespace that occur at the ends of lines
5350/// in the given rope.
5351///
5352/// This could also be done with a regex search, but this implementation
5353/// avoids copying text.
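///
/// Illustrative sketch, assuming `Rope` can be built from a `&str` (not compiled here):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\n");
/// // One range for the two spaces after "a", one for the tab after "b".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```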
5354pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5355 let mut ranges = Vec::new();
5356
5357 let mut offset = 0;
5358 let mut prev_chunk_trailing_whitespace_range = 0..0;
5359 for chunk in rope.chunks() {
5360 let mut prev_line_trailing_whitespace_range = 0..0;
5361 for (i, line) in chunk.split('\n').enumerate() {
5362 let line_end_offset = offset + line.len();
5363 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5364 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5365
5366 if i == 0 && trimmed_line_len == 0 {
5367 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5368 }
5369 if !prev_line_trailing_whitespace_range.is_empty() {
5370 ranges.push(prev_line_trailing_whitespace_range);
5371 }
5372
5373 offset = line_end_offset + 1;
5374 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5375 }
5376
5377 offset -= 1;
5378 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5379 }
5380
5381 if !prev_chunk_trailing_whitespace_range.is_empty() {
5382 ranges.push(prev_chunk_trailing_whitespace_range);
5383 }
5384
5385 ranges
5386}