1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encoding_rs::Encoding;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
///
/// Lets callers identify or await this specific task, e.g. in tests.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}
87
/// A row (line) index within a buffer.
pub type BufferRow = u32;
89
/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    /// The underlying text buffer and its operation history.
    text: TextBuffer,
    /// Link back to the base buffer, when this buffer is a branch of another.
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    /// A version vector marking a "preview" state of the buffer.
    /// NOTE(review): exact semantics not visible in this chunk — confirm against callers.
    preview_version: clock::Global,
    /// Depth of currently-nested transactions.
    transaction_depth: usize,
    /// Dirty state captured when the outermost transaction began, used to
    /// detect dirty-state changes once it ends.
    was_dirty_before_starting_transaction: Option<bool>,
    /// In-flight task reloading the buffer's contents from disk, if any.
    reload_task: Option<Task<Result<()>>>,
    /// The language assigned to this buffer, if any.
    language: Option<Arc<Language>>,
    /// Pending auto-indentation requests, produced by edits.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// Channels to notify once pending auto-indentation has completed.
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    /// In-flight background auto-indent computation, if any.
    pending_autoindent: Option<Task<()>>,
    /// Time budget for parsing synchronously before deferring to a
    /// background task — TODO confirm against the reparse logic.
    sync_parse_timeout: Duration,
    /// Incrementally-maintained syntax trees for this buffer's text.
    syntax_map: Mutex<SyntaxMap>,
    /// In-flight background reparse task, if any.
    reparse: Option<Task<()>>,
    /// Broadcasts whether a reparse is currently in progress.
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    /// Counter bumped on non-text state changes (diagnostics, selections, etc.).
    non_text_state_update_count: usize,
    /// Diagnostics for this buffer, keyed by the language server that produced them.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// The most recent selections of each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// Lamport timestamp of the most recent diagnostics update.
    diagnostics_timestamp: clock::Lamport,
    /// Union of completion trigger characters across language servers.
    completion_triggers: BTreeSet<String>,
    /// Completion trigger characters, per language server.
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    /// Lamport timestamp of the most recent completion-triggers update.
    completion_triggers_timestamp: clock::Lamport,
    /// Remote operations that cannot be applied yet, presumably because
    /// causally-earlier operations have not arrived.
    deferred_ops: OperationQueue<Operation>,
    /// Whether this replica may edit the buffer.
    capability: Capability,
    /// Whether the buffer's contents conflict with the file on disk.
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of a last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    /// Weak handles to flags that get set when the buffer changes, letting
    /// observers poll for changes cheaply.
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
    /// The text encoding used when loading/saving this buffer's file.
    pub encoding: Arc<std::sync::Mutex<&'static Encoding>>,
}
132
/// Whether the buffer's syntax trees are currently being recomputed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    /// No reparse is in progress.
    Idle,
    /// A reparse is currently running.
    Parsing,
}
138
/// State linking a branch buffer back to the buffer it was created from.
struct BufferBranchState {
    /// The buffer this branch was forked from.
    base_buffer: Entity<Buffer>,
    /// Lamport timestamps of branch operations that have been merged into
    /// the base buffer.
    merged_operations: Vec<Lamport>,
}
143
/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    /// The underlying text snapshot.
    pub text: text::BufferSnapshot,
    /// The syntax trees corresponding to this version of the text.
    pub syntax: SyntaxSnapshot,
    /// The buffer's file at the time of the snapshot.
    file: Option<Arc<dyn File>>,
    /// Diagnostics keyed by the language server that produced them.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    /// Remote collaborators' selections at the time of the snapshot.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// The buffer's language at the time of the snapshot.
    language: Option<Arc<Language>>,
    /// Value of the buffer's non-text update counter at snapshot time.
    non_text_state_update_count: usize,
}
155
/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character (see [`IndentKind`]).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}
165
/// A whitespace character that's used for indentation.
/// Defaults to [`IndentKind::Space`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}
175
/// The shape of a selection cursor.
///
/// Convertible from [`settings::CursorShape`] via `From`.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}
189
// Maps the settings-level cursor shape onto the buffer-level representation;
// the two enums have identical variants.
impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}
200
/// A replica's set of selections, with the metadata needed to render them.
#[derive(Clone, Debug)]
struct SelectionSet {
    /// Whether the selections are in 'line mode'.
    line_mode: bool,
    /// The cursor shape to render for these selections.
    cursor_shape: CursorShape,
    /// The selections themselves.
    selections: Arc<[Selection<Anchor>]>,
    /// Lamport timestamp of the update that produced this set.
    lamport_timestamp: clock::Lamport,
}
208
/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    /// A URI pointing to documentation that describes this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format)
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Quick separation of diagnostics groups based by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}
250
/// How a diagnostic was obtained, used for quick separation of
/// diagnostic groups by their source.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    /// Pulled from the server by a client request (LSP pull diagnostics).
    Pulled,
    /// Pushed by the server, e.g. via `textDocument/publishDiagnostics`.
    Pushed,
    /// Produced by some other source — presumably not a language server.
    Other,
}
257
/// An operation used to synchronize this buffer with its other replicas.
///
/// Variants other than [`Operation::Buffer`] carry non-text state:
/// diagnostics, selections, completion triggers, and line endings.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}
305
/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        /// The operation to propagate.
        operation: Operation,
        /// Whether the operation originated on this replica — TODO confirm
        /// against the emit sites.
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}
336
/// The file associated with a buffer.
///
/// Buffers without a backing file have no implementation of this trait
/// attached (see [`Buffer`]'s `file` field).
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local (i.e. has a [`LocalFile`] view).
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}
377
/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present {
        /// The file's last-known modification time on disk.
        mtime: MTime,
    },
    /// Deleted file that was previously present.
    Deleted,
}
391
392impl DiskState {
393 /// Returns the file's last known modification time on disk.
394 pub fn mtime(self) -> Option<MTime> {
395 match self {
396 DiskState::New => None,
397 DiskState::Present { mtime } => Some(mtime),
398 DiskState::Deleted => None,
399 }
400 }
401
402 pub fn exists(&self) -> bool {
403 match self {
404 DiskState::New => false,
405 DiskState::Present { .. } => true,
406 DiskState::Deleted => false,
407 }
408 }
409}
410
/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;

    /// Loads the file contents from disk, decoding them with the given encoding.
    ///
    /// When `force` is false, a byte-order mark in the file presumably takes
    /// precedence over `encoding`; `buffer_encoding` presumably receives the
    /// encoding that was actually used — confirm against implementations.
    fn load_with_encoding(
        &self,
        cx: &App,
        encoding: &'static Encoding,
        force: bool, // whether to force the encoding even if a BOM is present
        buffer_encoding: Arc<std::sync::Mutex<&'static Encoding>>,
    ) -> Task<Result<String>>;
}
431
/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will be auto-indented to column `b - a`
        original_indent_columns: Vec<Option<u32>>,
    },
}
455
/// A request to recompute the indentation of a set of edited ranges.
#[derive(Clone)]
struct AutoindentRequest {
    /// A snapshot of the buffer taken before the triggering edit.
    before_edit: BufferSnapshot,
    /// The edited ranges whose indentation should be adjusted.
    entries: Vec<AutoindentRequestEntry>,
    /// Whether this request uses [`AutoindentMode::Block`] semantics.
    is_block_mode: bool,
    /// Whether to leave empty lines unadjusted.
    ignore_empty_lines: bool,
}
463
#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    /// The indentation unit (kind and width) to use when adjusting.
    indent_size: IndentSize,
    /// The indentation column the inserted text had when it was copied,
    /// if any (see [`AutoindentMode::Block`]).
    original_indent_column: Option<u32>,
}
476
/// A suggested indentation for a line, expressed relative to a basis row.
#[derive(Debug)]
struct IndentSuggestion {
    /// The row whose indentation this suggestion is relative to.
    basis_row: u32,
    /// How this line's indent compares to the basis row's — presumably
    /// `Greater` means indent deeper; confirm against the consumers.
    delta: Ordering,
    /// Whether the suggestion was computed within a syntax error — TODO confirm.
    within_error: bool,
}
483
/// Syntax-highlighting state carried by a [`BufferChunks`] iterator.
struct BufferChunkHighlights<'a> {
    /// Iterator over syntax-highlighting captures in the target range.
    captures: SyntaxMapCaptures<'a>,
    /// The next capture to process, if it has already been fetched.
    next_capture: Option<SyntaxMapCapture<'a>>,
    /// Stack of currently-open highlights — presumably (end offset, id) pairs.
    stack: Vec<(usize, HighlightId)>,
    /// One highlight map per grammar in `captures`.
    highlight_maps: Vec<HighlightMap>,
}
490
/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    /// Source snapshot, when diagnostics/highlights need to be re-derived;
    /// `None` for plain text-only iteration — TODO confirm.
    buffer_snapshot: Option<&'a BufferSnapshot>,
    /// The byte range of the buffer being iterated.
    range: Range<usize>,
    /// The underlying iterator over raw text chunks.
    chunks: text::Chunks<'a>,
    /// Diagnostic range start/end points remaining to be processed.
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    /// Nesting depth of error diagnostics currently open at the iterator's position.
    error_depth: usize,
    /// Nesting depth of currently-open warning diagnostics.
    warning_depth: usize,
    /// Nesting depth of currently-open information diagnostics.
    information_depth: usize,
    /// Nesting depth of currently-open hint diagnostics.
    hint_depth: usize,
    /// Nesting depth of currently-open "unnecessary code" diagnostics.
    unnecessary_depth: usize,
    /// Whether the current position should be underlined.
    underline: bool,
    /// Syntax-highlighting state, absent for plain-text iteration.
    highlights: Option<BufferChunkHighlights<'a>>,
}
506
/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status. Yielded by [`BufferChunks`].
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}
533
/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    /// The buffer version the edits apply to.
    pub base_version: clock::Global,
    /// The line ending of the new text.
    pub line_ending: LineEnding,
    /// The edits, as byte ranges in the base text paired with replacement text.
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}
541
/// Marks the start or end of a diagnostic's range when iterating buffer chunks.
#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    /// The buffer offset of this endpoint.
    offset: usize,
    /// Whether this is the start (as opposed to the end) of the diagnostic's range.
    is_start: bool,
    /// Whether the diagnostic's range should be underlined.
    underline: bool,
    /// The severity of the diagnostic.
    severity: DiagnosticSeverity,
    /// Whether the diagnostic marks unnecessary code.
    is_unnecessary: bool,
}
550
/// A class of characters, used for characterizing a run of text.
///
/// Ordering follows declaration order: `Whitespace < Punctuation < Word`.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}
561
/// Context for character classification within a specific scope.
/// See [`CharKind`] for the classification itself.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}
578
/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    /// The runnable tags captured in the region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language the region was parsed with.
    pub language: Arc<Language>,
    /// The buffer in which the region was found.
    pub buffer: BufferId,
}
585
/// A snippet of text with pre-computed highlight ranges, ready for rendering.
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    /// The text itself.
    pub text: SharedString,
    /// Byte ranges of `text`, paired with the style to apply to each.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}
591
/// A mutable accumulator used to build a [`HighlightedText`].
#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    /// The accumulated text.
    pub text: String,
    /// Byte ranges of `text`, paired with their styles.
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}
597
impl HighlightedText {
    /// Builds a [`HighlightedText`] for `range` of the given buffer, combining
    /// the buffer's syntax highlighting with an optional override style.
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    /// Converts this highlighted text into a [`StyledText`] element, layering
    /// the highlight ranges over the given default text style.
    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line without leading whitespace unless highlighted
    /// and a boolean indicating if there are more lines after
    pub fn first_line_preview(self) -> (Self, bool) {
        // End of the first line, or of the whole text when it is single-line.
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        // Keep only the highlights that intersect the preview, shifting them
        // left by `preview_start_ix` and clipping them at the end of the line.
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                // Drop highlights that were clipped down to nothing.
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}
658
659impl HighlightedTextBuilder {
660 pub fn build(self) -> HighlightedText {
661 HighlightedText {
662 text: self.text.into(),
663 highlights: self.highlights,
664 }
665 }
666
667 pub fn add_text_from_buffer_range<T: ToOffset>(
668 &mut self,
669 range: Range<T>,
670 snapshot: &text::BufferSnapshot,
671 syntax_snapshot: &SyntaxSnapshot,
672 override_style: Option<HighlightStyle>,
673 syntax_theme: &SyntaxTheme,
674 ) {
675 let range = range.to_offset(snapshot);
676 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
677 let start = self.text.len();
678 self.text.push_str(chunk.text);
679 let end = self.text.len();
680
681 if let Some(highlight_style) = chunk
682 .syntax_highlight_id
683 .and_then(|id| id.style(syntax_theme))
684 {
685 let highlight_style = override_style.map_or(highlight_style, |override_style| {
686 highlight_style.highlight(override_style)
687 });
688 self.highlights.push((start..end, highlight_style));
689 } else if let Some(override_style) = override_style {
690 self.highlights.push((start..end, override_style));
691 }
692 }
693 }
694
695 fn highlighted_chunks<'a>(
696 range: Range<usize>,
697 snapshot: &'a text::BufferSnapshot,
698 syntax_snapshot: &'a SyntaxSnapshot,
699 ) -> BufferChunks<'a> {
700 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
701 grammar
702 .highlights_config
703 .as_ref()
704 .map(|config| &config.query)
705 });
706
707 let highlight_maps = captures
708 .grammars()
709 .iter()
710 .map(|grammar| grammar.highlight_map())
711 .collect();
712
713 BufferChunks::new(
714 snapshot.as_rope(),
715 range,
716 Some((captures, highlight_maps)),
717 false,
718 None,
719 )
720 }
721}
722
/// Precomputed state for previewing a set of edits to a buffer.
#[derive(Clone)]
pub struct EditPreview {
    /// A snapshot of the buffer before the edits were applied.
    old_snapshot: text::BufferSnapshot,
    /// A snapshot of the buffer with the edits applied.
    applied_edits_snapshot: text::BufferSnapshot,
    /// Syntax trees corresponding to `applied_edits_snapshot`.
    syntax_snapshot: SyntaxSnapshot,
}
729
730impl EditPreview {
731 pub fn highlight_edits(
732 &self,
733 current_snapshot: &BufferSnapshot,
734 edits: &[(Range<Anchor>, String)],
735 include_deletions: bool,
736 cx: &App,
737 ) -> HighlightedText {
738 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
739 return HighlightedText::default();
740 };
741
742 let mut highlighted_text = HighlightedTextBuilder::default();
743
744 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
745
746 let insertion_highlight_style = HighlightStyle {
747 background_color: Some(cx.theme().status().created_background),
748 ..Default::default()
749 };
750 let deletion_highlight_style = HighlightStyle {
751 background_color: Some(cx.theme().status().deleted_background),
752 ..Default::default()
753 };
754 let syntax_theme = cx.theme().syntax();
755
756 for (range, edit_text) in edits {
757 let edit_new_end_in_preview_snapshot = range
758 .end
759 .bias_right(&self.old_snapshot)
760 .to_offset(&self.applied_edits_snapshot);
761 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
762
763 let unchanged_range_in_preview_snapshot =
764 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
765 if !unchanged_range_in_preview_snapshot.is_empty() {
766 highlighted_text.add_text_from_buffer_range(
767 unchanged_range_in_preview_snapshot,
768 &self.applied_edits_snapshot,
769 &self.syntax_snapshot,
770 None,
771 syntax_theme,
772 );
773 }
774
775 let range_in_current_snapshot = range.to_offset(current_snapshot);
776 if include_deletions && !range_in_current_snapshot.is_empty() {
777 highlighted_text.add_text_from_buffer_range(
778 range_in_current_snapshot,
779 ¤t_snapshot.text,
780 ¤t_snapshot.syntax,
781 Some(deletion_highlight_style),
782 syntax_theme,
783 );
784 }
785
786 if !edit_text.is_empty() {
787 highlighted_text.add_text_from_buffer_range(
788 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
789 &self.applied_edits_snapshot,
790 &self.syntax_snapshot,
791 Some(insertion_highlight_style),
792 syntax_theme,
793 );
794 }
795
796 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
797 }
798
799 highlighted_text.add_text_from_buffer_range(
800 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
801 &self.applied_edits_snapshot,
802 &self.syntax_snapshot,
803 None,
804 syntax_theme,
805 );
806
807 highlighted_text.build()
808 }
809
810 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
811 let (first, _) = edits.first()?;
812 let (last, _) = edits.last()?;
813
814 let start = first
815 .start
816 .bias_left(&self.old_snapshot)
817 .to_point(&self.applied_edits_snapshot);
818 let end = last
819 .end
820 .bias_right(&self.old_snapshot)
821 .to_point(&self.applied_edits_snapshot);
822
823 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
824 let range = Point::new(start.row, 0)
825 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
826
827 Some(range.to_offset(&self.applied_edits_snapshot))
828 }
829}
830
/// A matched pair of brackets in a buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch {
    /// The byte range of the opening bracket.
    pub open_range: Range<usize>,
    /// The byte range of the closing bracket.
    pub close_range: Range<usize>,
    /// Whether this pair applies only around newlines — TODO confirm against
    /// the bracket-query consumers.
    pub newline_only: bool,
}
837
838impl Buffer {
    /// Create a new buffer with the given base text.
    ///
    /// The buffer is writable and has no associated file.
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                // The entity id doubles as the buffer id for local buffers.
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
                &cx.background_executor(),
            ),
            None,
            Capability::ReadWrite,
        )
    }
852
    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    ///
    /// Like [`Buffer::local`], but skips normalization because the caller
    /// guarantees the text is already normalized.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                // The entity id doubles as the buffer id for local buffers.
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }
870
    /// Create a new buffer that is a replica of a remote buffer.
    ///
    /// The replica starts with the given `capability`; see
    /// [`BufferEvent::CapabilityChanged`] for later changes.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
        cx: &BackgroundExecutor,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
            None,
            capability,
        )
    }
885
    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    ///
    /// Restores the line ending, saved version, and saved mtime in addition to
    /// the base text. Returns an error if the message's buffer id is invalid.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
        cx: &BackgroundExecutor,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }
905
    /// Serialize the buffer's state to a protobuf message.
    ///
    /// Only the base text and saved state are captured here; operations are
    /// serialized separately via [`Buffer::serialize_ops`].
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }
917
    /// Serialize as protobufs all of the changes to the buffer since the given version.
    ///
    /// The result includes deferred operations, current selections, diagnostics,
    /// and completion triggers, plus all text operations not observed by `since`
    /// (all of them when `since` is `None`), sorted by lamport timestamp.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        // Operations we haven't been able to apply yet still need to be relayed.
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        // One selections-update operation per replica.
        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        // One diagnostics-update operation per language server.
        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        // One completion-triggers operation per language server.
        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        // Filter, serialize, and sort the text operations on a background
        // thread, since the history may be large.
        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            // Include only text operations that `since` has not observed.
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }
967
    /// Assign a language to the buffer, returning the buffer.
    ///
    /// Builder-style convenience around [`Self::set_language`].
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }
973
    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }
978
979 /// Whether this buffer can only be read.
980 pub fn read_only(&self) -> bool {
981 self.capability == Capability::ReadOnly
982 }
983
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        // The buffer starts out "saved" at its current version and mtime;
        // dirtiness is tracked relative to these.
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            // How long `reparse` may block the foreground thread waiting for a
            // synchronous parse before falling back to a background task.
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
            // Default to UTF-8 until an encoding is detected/assigned.
            encoding: Arc::new(std::sync::Mutex::new(encoding_rs::UTF_8)),
        }
    }
1023
1024 pub fn build_snapshot(
1025 text: Rope,
1026 language: Option<Arc<Language>>,
1027 language_registry: Option<Arc<LanguageRegistry>>,
1028 cx: &mut App,
1029 ) -> impl Future<Output = BufferSnapshot> + use<> {
1030 let entity_id = cx.reserve_entity::<Self>().entity_id();
1031 let buffer_id = entity_id.as_non_zero_u64().into();
1032 async move {
1033 let text =
1034 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1035 .snapshot();
1036 let mut syntax = SyntaxMap::new(&text).snapshot();
1037 if let Some(language) = language.clone() {
1038 let language_registry = language_registry.clone();
1039 syntax.reparse(&text, language_registry, language);
1040 }
1041 BufferSnapshot {
1042 text,
1043 syntax,
1044 file: None,
1045 diagnostics: Default::default(),
1046 remote_selections: Default::default(),
1047 language,
1048 non_text_state_update_count: 0,
1049 }
1050 }
1051 }
1052
1053 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1054 let entity_id = cx.reserve_entity::<Self>().entity_id();
1055 let buffer_id = entity_id.as_non_zero_u64().into();
1056 let text = TextBuffer::new_normalized(
1057 ReplicaId::LOCAL,
1058 buffer_id,
1059 Default::default(),
1060 Rope::new(),
1061 )
1062 .snapshot();
1063 let syntax = SyntaxMap::new(&text).snapshot();
1064 BufferSnapshot {
1065 text,
1066 syntax,
1067 file: None,
1068 diagnostics: Default::default(),
1069 remote_selections: Default::default(),
1070 language: None,
1071 non_text_state_update_count: 0,
1072 }
1073 }
1074
1075 #[cfg(any(test, feature = "test-support"))]
1076 pub fn build_snapshot_sync(
1077 text: Rope,
1078 language: Option<Arc<Language>>,
1079 language_registry: Option<Arc<LanguageRegistry>>,
1080 cx: &mut App,
1081 ) -> BufferSnapshot {
1082 let entity_id = cx.reserve_entity::<Self>().entity_id();
1083 let buffer_id = entity_id.as_non_zero_u64().into();
1084 let text =
1085 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1086 .snapshot();
1087 let mut syntax = SyntaxMap::new(&text).snapshot();
1088 if let Some(language) = language.clone() {
1089 syntax.reparse(&text, language_registry, language);
1090 }
1091 BufferSnapshot {
1092 text,
1093 syntax,
1094 file: None,
1095 diagnostics: Default::default(),
1096 remote_selections: Default::default(),
1097 language,
1098 non_text_state_update_count: 0,
1099 }
1100 }
1101
1102 /// Retrieve a snapshot of the buffer's current state. This is computationally
1103 /// cheap, and allows reading from the buffer on a background thread.
1104 pub fn snapshot(&self) -> BufferSnapshot {
1105 let text = self.text.snapshot();
1106 let mut syntax_map = self.syntax_map.lock();
1107 syntax_map.interpolate(&text);
1108 let syntax = syntax_map.snapshot();
1109
1110 BufferSnapshot {
1111 text,
1112 syntax,
1113 file: self.file.clone(),
1114 remote_selections: self.remote_selections.clone(),
1115 diagnostics: self.diagnostics.clone(),
1116 language: self.language.clone(),
1117 non_text_state_update_count: self.non_text_state_update_count,
1118 }
1119 }
1120
    /// Creates a new branch buffer based on this one.
    ///
    /// The branch shares this buffer's text history and subscribes to its
    /// events so that edits to the base can be replayed onto the branch
    /// (see `on_base_buffer_event`).
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx);

            branch
        })
    }
1145
    /// Computes, on a background task, an [`EditPreview`] showing how the given
    /// edits would change this buffer, without modifying the buffer itself.
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, String)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        let executor = cx.background_executor().clone();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                // Bring the syntax snapshot up to date with the pre-edit text
                // first, so the post-edit reparse below can be incremental.
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned(), &executor);
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }
1178
    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        // An empty `ranges` means "everything": treat it as one all-covering range.
        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        // Walk the branch's edits since the base's version in tandem with the
        // (sorted) ranges, keeping only edits that intersect some range.
        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    // Range ends before this edit; it can't match later edits either.
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        // Remember the merged operation so that when it echoes back from the
        // base buffer it can be recognized and undone on the branch.
        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }
1233
    /// Handles operations emitted by the base buffer, replaying them onto this
    /// branch buffer.
    ///
    /// If the incoming edit is one that this branch previously merged into the
    /// base (see `merge_into_base`), it is applied and then immediately undone,
    /// so the branch's content is not changed twice by its own edit.
    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }
1265
    /// Test-only access to the underlying text snapshot, without cloning.
    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }
1270
    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }
1276
    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
1281
    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }
1286
    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }
1291
    /// Assign a language to the buffer.
    ///
    /// Clears the existing syntax map (the old parse is invalid for the new
    /// language), kicks off a reparse, and emits `LanguageChanged`.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }
1301
1302 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1303 /// other languages if parts of the buffer are written in different languages.
1304 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1305 self.syntax_map
1306 .lock()
1307 .set_language_registry(language_registry);
1308 }
1309
    /// Returns the language registry assigned via [`Self::set_language_registry`], if any.
    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }
1313
1314 /// Assign the line ending type to the buffer.
1315 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1316 self.text.set_line_ending(line_ending);
1317
1318 let lamport_timestamp = self.text.lamport_clock.tick();
1319 self.send_operation(
1320 Operation::UpdateLineEnding {
1321 line_ending,
1322 lamport_timestamp,
1323 },
1324 true,
1325 cx,
1326 );
1327 }
1328
1329 /// Assign the buffer a new [`Capability`].
1330 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1331 if self.capability != capability {
1332 self.capability = capability;
1333 cx.emit(BufferEvent::CapabilityChanged)
1334 }
1335 }
1336
    /// This method is called to signal that the buffer has been saved.
    ///
    /// Records the saved version/mtime, clears conflict and unsaved-edit
    /// state, and emits `Saved`.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }
1352
    /// Reloads the contents of the buffer from disk.
    ///
    /// Returns a receiver that resolves with the reload transaction (if one
    /// was applied) once the reload completes.
    pub fn reload(
        &mut self,
        cx: &Context<Self>,
        force: bool, // whether to force the encoding even if a BOM is present
    ) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let encoding = self.encoding.clone();

        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            // Only local files can be reloaded from disk.
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;
                Some((file.disk_state().mtime(), {
                    file.load_with_encoding(cx, &*encoding.lock().unwrap(), force, encoding.clone())
                }))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    // No concurrent edits: apply the reload as a single
                    // finalized transaction and clear any conflict.
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    // The buffer changed while diffing. If either the disk
                    // contents or the buffer diverged, mark a conflict.
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }
1403
    /// This method is called to signal that the buffer has been reloaded.
    ///
    /// Records the reloaded version, line ending, and mtime, resets the
    /// unsaved-edit flag, and emits `Reloaded`.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }
1420
    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                // The file changed on disk while the buffer had no local
                // edits, so a reload is safe.
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            // Buffer previously had no file; gaining one counts as a change.
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }
1455
1456 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1457 Some(self.branch_state.as_ref()?.base_buffer.clone())
1458 }
1459
    /// Returns the primary [`Language`] assigned to this [`Buffer`], if any.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
1464
    /// Returns the [`Language`] at the given location.
    ///
    /// Takes the deepest syntax layer covering the position (after filtering
    /// layers to those whose included sub-ranges actually contain it), falling
    /// back to the buffer's primary language.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                // Always keep the outermost (first) layer.
                if is_first {
                    is_first = false;
                    return true;
                }

                // Keep deeper layers only if one of their included sub-ranges
                // overlaps the queried position; layers without sub-range
                // information are kept unconditionally.
                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }
1495
1496 /// Returns each [`Language`] for the active syntax layers at the given location.
1497 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1498 let offset = position.to_offset(self);
1499 let mut languages: Vec<Arc<Language>> = self
1500 .syntax_map
1501 .lock()
1502 .layers_for_range(offset..offset, &self.text, false)
1503 .map(|info| info.language.clone())
1504 .collect();
1505
1506 if languages.is_empty()
1507 && let Some(buffer_language) = self.language()
1508 {
1509 languages.push(buffer_language.clone());
1510 }
1511
1512 languages
1513 }
1514
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
1520
    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }
1526
    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }
1532
    /// Test-only override for how long `reparse` blocks on a synchronous parse.
    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
1537
1538 /// Called after an edit to synchronize the buffer's main parse tree with
1539 /// the buffer's new underlying state.
1540 ///
1541 /// Locks the syntax map and interpolates the edits since the last reparse
1542 /// into the foreground syntax tree.
1543 ///
1544 /// Then takes a stable snapshot of the syntax map before unlocking it.
1545 /// The snapshot with the interpolated edits is sent to a background thread,
1546 /// where we ask Tree-sitter to perform an incremental parse.
1547 ///
1548 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1549 /// waiting on the parse to complete. As soon as it completes, we proceed
1550 /// synchronously, unless a 1ms timeout elapses.
1551 ///
1552 /// If we time out waiting on the parse, we spawn a second task waiting
1553 /// until the parse does complete and return with the interpolated tree still
1554 /// in the foreground. When the background parse completes, call back into
1555 /// the main thread and assign the foreground parse state.
1556 ///
1557 /// If the buffer or grammar changed since the start of the background parse,
1558 /// initiate an additional reparse recursively. To avoid concurrent parses
1559 /// for the same buffer, we only initiate a new parse if we are not already
1560 /// parsing in the background.
1561 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1562 if self.reparse.is_some() {
1563 return;
1564 }
1565 let language = if let Some(language) = self.language.clone() {
1566 language
1567 } else {
1568 return;
1569 };
1570
1571 let text = self.text_snapshot();
1572 let parsed_version = self.version();
1573
1574 let mut syntax_map = self.syntax_map.lock();
1575 syntax_map.interpolate(&text);
1576 let language_registry = syntax_map.language_registry();
1577 let mut syntax_snapshot = syntax_map.snapshot();
1578 drop(syntax_map);
1579
1580 let parse_task = cx.background_spawn({
1581 let language = language.clone();
1582 let language_registry = language_registry.clone();
1583 async move {
1584 syntax_snapshot.reparse(&text, language_registry, language);
1585 syntax_snapshot
1586 }
1587 });
1588
1589 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1590 match cx
1591 .background_executor()
1592 .block_with_timeout(self.sync_parse_timeout, parse_task)
1593 {
1594 Ok(new_syntax_snapshot) => {
1595 self.did_finish_parsing(new_syntax_snapshot, cx);
1596 self.reparse = None;
1597 }
1598 Err(parse_task) => {
1599 // todo(lw): hot foreground spawn
1600 self.reparse = Some(cx.spawn(async move |this, cx| {
1601 let new_syntax_map = cx.background_spawn(parse_task).await;
1602 this.update(cx, move |this, cx| {
1603 let grammar_changed = || {
1604 this.language.as_ref().is_none_or(|current_language| {
1605 !Arc::ptr_eq(&language, current_language)
1606 })
1607 };
1608 let language_registry_changed = || {
1609 new_syntax_map.contains_unknown_injections()
1610 && language_registry.is_some_and(|registry| {
1611 registry.version() != new_syntax_map.language_registry_version()
1612 })
1613 };
1614 let parse_again = this.version.changed_since(&parsed_version)
1615 || language_registry_changed()
1616 || grammar_changed();
1617 this.did_finish_parsing(new_syntax_map, cx);
1618 this.reparse = None;
1619 if parse_again {
1620 this.reparse(cx);
1621 }
1622 })
1623 .ok();
1624 }));
1625 }
1626 }
1627 }
1628
    /// Installs a completed parse into the syntax map, triggers any pending
    /// autoindent work, and emits `Reparsed`.
    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        // Autoindent suggestions depend on the syntax tree, so recompute now.
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }
1638
    /// Returns a receiver for observing changes to the buffer's parse status.
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }
1642
    /// Assign to the buffer a set of diagnostics created by a given language server.
    ///
    /// Applies the update locally and also sends it as an operation.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }
1660
1661 pub fn buffer_diagnostics(
1662 &self,
1663 for_server: Option<LanguageServerId>,
1664 ) -> Vec<&DiagnosticEntry<Anchor>> {
1665 match for_server {
1666 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1667 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1668 Err(_) => Vec::new(),
1669 },
1670 None => self
1671 .diagnostics
1672 .iter()
1673 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1674 .collect(),
1675 }
1676 }
1677
    /// Kicks off computation of pending autoindent requests.
    ///
    /// Mirrors `reparse`: tries to complete the work synchronously within a
    /// short timeout, and otherwise finishes it in a spawned task.
    fn request_autoindent(&mut self, cx: &mut Context<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            // Nothing to compute: drop the requests and notify any waiters.
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }
1703
    /// Builds a future that computes the indent size for each row affected by
    /// the pending autoindent requests.
    ///
    /// Returns `None` when there is no syntax tree or no pending requests.
    fn compute_autoindents(
        &self,
    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
        // Yield back to the executor periodically so large edits don't hog it.
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for (row_range, original_indent_column) in row_ranges {
                    let new_edited_row_range = if request.is_block_mode {
                        // In block mode, only the first row of the range is
                        // autoindented directly; the rest shift with it below.
                        row_range.start..row_range.start + 1
                    } else {
                        row_range.clone()
                    };

                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .map(|e| e.0)
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);

                            if old_suggestions.get(&new_row).is_none_or(
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(
                                    new_row,
                                    (suggested_indent, request.ignore_empty_lines),
                                );
                            }
                        }
                    }

                    if let (true, Some(original_indent_column)) =
                        (request.is_block_mode, original_indent_column)
                    {
                        // Shift the remaining rows of the block by the same delta
                        // that was applied to its first row.
                        let new_indent =
                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
                                *indent
                            } else {
                                snapshot.indent_size_for_line(row_range.start)
                            };
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
                                            Ordering::Less => {
                                                size.len = size.len.saturating_sub(-delta as u32)
                                            }
                                            Ordering::Equal => {}
                                        }
                                    }
                                    (size, request.ignore_empty_lines)
                                });
                            }
                        }
                    }

                    yield_now().await;
                }
            }

            // Optionally skip empty lines, per each request's flag.
            indent_sizes
                .into_iter()
                .filter_map(|(row, (indent, ignore_empty_lines))| {
                    if ignore_empty_lines && snapshot.line_len(row) == 0 {
                        None
                    } else {
                        Some((row, indent))
                    }
                })
                .collect()
        })
    }
1874
    /// Applies previously-computed indent sizes to the buffer as edits.
    ///
    /// Clears all pending autoindent requests and notifies any waiters first.
    fn apply_autoindents(
        &mut self,
        indent_sizes: BTreeMap<u32, IndentSize>,
        cx: &mut Context<Self>,
    ) {
        self.autoindent_requests.clear();
        for tx in self.wait_for_autoindent_txs.drain(..) {
            tx.send(()).ok();
        }

        // Only emit an edit for rows whose indentation actually needs to change.
        let edits: Vec<_> = indent_sizes
            .into_iter()
            .filter_map(|(row, indent_size)| {
                let current_size = indent_size_for_line(self, row);
                Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
            })
            .collect();

        let preserve_preview = self.preserve_preview();
        self.edit(edits, None, cx);
        if preserve_preview {
            self.refresh_preview();
        }
    }
1899
1900 /// Create a minimal edit that will cause the given row to be indented
1901 /// with the given size. After applying this edit, the length of the line
1902 /// will always be at least `new_size.len`.
1903 pub fn edit_for_indent_size_adjustment(
1904 row: u32,
1905 current_size: IndentSize,
1906 new_size: IndentSize,
1907 ) -> Option<(Range<Point>, String)> {
1908 if new_size.kind == current_size.kind {
1909 match new_size.len.cmp(¤t_size.len) {
1910 Ordering::Greater => {
1911 let point = Point::new(row, 0);
1912 Some((
1913 point..point,
1914 iter::repeat(new_size.char())
1915 .take((new_size.len - current_size.len) as usize)
1916 .collect::<String>(),
1917 ))
1918 }
1919
1920 Ordering::Less => Some((
1921 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1922 String::new(),
1923 )),
1924
1925 Ordering::Equal => None,
1926 }
1927 } else {
1928 Some((
1929 Point::new(row, 0)..Point::new(row, current_size.len),
1930 iter::repeat(new_size.char())
1931 .take(new_size.len as usize)
1932 .collect::<String>(),
1933 ))
1934 }
1935 }
1936
    /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
    /// and the given new text.
    ///
    /// The returned `Diff` records `base_version` (the buffer version the diff was
    /// computed against) so that [`Buffer::apply_diff`] can reconcile it with any
    /// edits that happen in the meantime. `new_text` is normalized to `\n` line
    /// endings; the detected line ending is carried in the `Diff`.
    pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
        let old_text = self.as_rope().clone();
        let base_version = self.version();
        cx.background_executor()
            .spawn_labeled(*BUFFER_DIFF_TASK, async move {
                let old_text = old_text.to_string();
                let line_ending = LineEnding::detect(&new_text);
                LineEnding::normalize(&mut new_text);
                let edits = text_diff(&old_text, &new_text);
                Diff {
                    base_version,
                    line_ending,
                    edits,
                }
            })
    }
1955
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
    pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
        let old_text = self.as_rope().clone();
        let line_ending = self.line_ending();
        let base_version = self.version();
        cx.background_spawn(async move {
            let ranges = trailing_whitespace_ranges(&old_text);
            // A single shared empty string is cloned (refcount bump only) for
            // every deletion edit.
            let empty = Arc::<str>::from("");
            Diff {
                base_version,
                line_ending,
                edits: ranges
                    .into_iter()
                    .map(|range| (range, empty.clone()))
                    .collect(),
            }
        })
    }
1975
    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Skips if the buffer is empty.
    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
        let len = self.len();
        if len == 0 {
            return;
        }
        // Scan backwards from the end of the buffer to find the offset just
        // past the last non-whitespace character.
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                // If the trailing whitespace is exactly one `\n`, the buffer
                // is already in the desired state; avoid a no-op edit.
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        // Replace all trailing whitespace with a single newline.
        self.edit([(offset..len, "\n")], None, cx);
    }
1999
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
    ///
    /// Returns the id of the transaction containing the applied edits, if any
    /// edits were made.
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
        let snapshot = self.snapshot();
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        // Running offset delta introduced by edits that occurred before the
        // current diff hunk.
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                else {
                    return None;
                }
            }

            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }
2036
    /// Returns whether the buffer has edits that have not been saved, caching
    /// the answer per buffer version to avoid recomputing it on every call.
    pub fn has_unsaved_edits(&self) -> bool {
        // `has_unsaved_edits` is a `Cell`, so take the cached value out and
        // put it back (interior mutability through `&self`).
        let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();

        // Cache hit: the buffer hasn't changed since the last check.
        if last_version == self.version {
            self.has_unsaved_edits
                .set((last_version, has_unsaved_edits));
            return has_unsaved_edits;
        }

        // Cache miss: recompute against the saved version and refresh the cache.
        let has_edits = self.has_edits_since(&self.saved_version);
        self.has_unsaved_edits
            .set((self.version.clone(), has_edits));
        has_edits
    }
2051
    /// Checks if the buffer has unsaved changes.
    ///
    /// Read-only buffers are never dirty; a buffer with a known disk conflict
    /// is always dirty.
    pub fn is_dirty(&self) -> bool {
        if self.capability == Capability::ReadOnly {
            return false;
        }
        if self.has_conflict {
            return true;
        }
        match self.file.as_ref().map(|f| f.disk_state()) {
            // For files that don't exist on disk, an empty buffer is not
            // considered dirty (nothing meaningful would be lost).
            Some(DiskState::New) | Some(DiskState::Deleted) => {
                !self.is_empty() && self.has_unsaved_edits()
            }
            _ => self.has_unsaved_edits(),
        }
    }
2067
    /// Checks if the buffer and its file have both changed since the buffer
    /// was last saved or reloaded.
    pub fn has_conflict(&self) -> bool {
        if self.has_conflict {
            return true;
        }
        let Some(file) = self.file.as_ref() else {
            return false;
        };
        match file.disk_state() {
            // A brand-new file can't conflict with anything on disk.
            DiskState::New => false,
            DiskState::Present { mtime } => match self.saved_mtime {
                // Conflict only when the file on disk is newer than our last
                // save AND we have local unsaved edits.
                Some(saved_mtime) => {
                    mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
                }
                // We've never saved, yet the file exists on disk.
                None => true,
            },
            DiskState::Deleted => false,
        }
    }
2088
    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    /// Adds a bit to the list of bits that are set when the buffer's text changes.
    ///
    /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
    pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
        // `change_bits` is kept sorted by pointer address so registration can
        // use binary search; a bit that is already registered is ignored.
        if let Err(ix) = self
            .change_bits
            .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
        {
            self.change_bits.insert(ix, bit);
        }
    }
2106
2107 /// Set the change bit for all "listeners".
2108 fn was_changed(&mut self) {
2109 self.change_bits.retain(|change_bit| {
2110 change_bit
2111 .upgrade()
2112 .inspect(|bit| {
2113 _ = bit.replace(true);
2114 })
2115 .is_some()
2116 });
2117 }
2118
    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        // Capture dirtiness only for the outermost transaction, so that
        // `end_transaction_at` can detect a dirty-state change across the
        // whole (possibly nested) transaction.
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }

    /// Terminates the current transaction, if this is the outermost transaction.
    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    /// Terminates the current transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut Context<Self>,
    ) -> Option<TransactionId> {
        assert!(self.transaction_depth > 0);
        self.transaction_depth -= 1;
        // Only the outermost `end_transaction_at` consumes the dirty flag
        // captured in `start_transaction_at`.
        let was_dirty = if self.transaction_depth == 0 {
            self.was_dirty_before_starting_transaction.take().unwrap()
        } else {
            false
        };
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }
2164
    /// Manually add a transaction to the buffer's undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.text.push_transaction(transaction, now);
    }

    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.text.push_empty_transaction(now)
    }

    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.text.finalize_last_transaction()
    }

    /// Manually group all changes since a given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.text.group_until_transaction(transaction_id);
    }

    /// Manually remove a transaction from the buffer's undo history.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.text.forget_transaction(transaction_id)
    }

    /// Retrieve a transaction from the buffer's undo history.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.text.get_transaction(transaction_id)
    }

    /// Manually merge two transactions in the buffer's undo history.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }
2210
    /// Waits for the buffer to receive operations with the given timestamps.
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl Future<Output = Result<()>> + use<It> {
        self.text.wait_for_edits(edit_ids)
    }

    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        self.text.wait_for_anchors(anchors)
    }

    /// Waits for the buffer to receive operations up to the given version.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        self.text.wait_for_version(version)
    }

    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.text.give_up_waiting();
    }
2240
2241 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2242 let mut rx = None;
2243 if !self.autoindent_requests.is_empty() {
2244 let channel = oneshot::channel();
2245 self.wait_for_autoindent_txs.push(channel.0);
2246 rx = Some(channel.1);
2247 }
2248 rx
2249 }
2250
    /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
    pub fn set_active_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        // Tick the lamport clock so remote replicas can order this update
        // relative to concurrent selection updates.
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections.insert(
            self.text.replica_id(),
            SelectionSet {
                selections: selections.clone(),
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        // Broadcast the update to other replicas.
        self.send_operation(
            Operation::UpdateSelections {
                selections,
                line_mode,
                lamport_timestamp,
                cursor_shape,
            },
            true,
            cx,
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2282
    /// Clears the selections, so that other replicas of the buffer do not see any selections for
    /// this replica.
    pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
        // Skip the broadcast when this replica already has an empty selection
        // set recorded, to avoid a redundant operation.
        if self
            .remote_selections
            .get(&self.text.replica_id())
            .is_none_or(|set| !set.selections.is_empty())
        {
            self.set_active_selections(Arc::default(), false, Default::default(), cx);
        }
    }
2294
    /// Stores a set of selections attributed to the agent replica.
    ///
    /// Unlike [`Buffer::set_active_selections`], this does not broadcast an
    /// operation to other replicas; the selections are only recorded locally.
    pub fn set_agent_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections.insert(
            ReplicaId::AGENT,
            SelectionSet {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }

    /// Clears the agent replica's selections.
    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
    }
2319
    /// Replaces the buffer's entire text.
    ///
    /// Any pending autoindent requests are discarded, since they were computed
    /// against text that no longer exists.
    pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.autoindent_requests.clear();
        self.edit([(0..self.len(), text)], None, cx)
    }

    /// Appends the given text to the end of the buffer.
    pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.edit([(self.len()..self.len(), text)], None, cx)
    }
2336
    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
    /// delete, and a string of text to insert at that location.
    ///
    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
    /// request for the edited ranges, which will be processed when the buffer finishes
    /// parsing.
    ///
    /// Parsing takes place at the end of a transaction, and may compute synchronously
    /// or asynchronously, depending on the changes.
    ///
    /// Returns the lamport timestamp of the resulting edit operation, or `None`
    /// if every given edit was empty.
    pub fn edit<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        // Skip invalid edits and coalesce contiguous ones.
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();

        for (range, new_text) in edits_iter {
            let mut range = range.start.to_offset(self)..range.end.to_offset(self);

            // Normalize reversed ranges rather than rejecting them.
            if range.start > range.end {
                mem::swap(&mut range.start, &mut range.end);
            }
            let new_text = new_text.into();
            if !new_text.is_empty() || !range.is_empty() {
                // Merge with the previous edit when the ranges touch or
                // overlap; assumes `edits_iter` yields ranges in order.
                if let Some((prev_range, prev_text)) = edits.last_mut()
                    && prev_range.end >= range.start
                {
                    prev_range.end = cmp::max(prev_range.end, range.end);
                    *prev_text = format!("{prev_text}{new_text}").into();
                } else {
                    edits.push((range, new_text));
                }
            }
        }
        if edits.is_empty() {
            return None;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        // Autoindent needs a snapshot of the buffer as it was *before* this
        // edit, and only applies when a language is set.
        let autoindent_request = autoindent_mode
            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));

        let edit_operation = self
            .text
            .edit(edits.iter().cloned(), cx.background_executor());
        let edit_id = edit_operation.timestamp();

        if let Some((before_edit, mode)) = autoindent_request {
            // Tracks the offset shift introduced by earlier edits, so that
            // pre-edit ranges can be mapped into post-edit coordinates.
            let mut delta = 0isize;
            // Caches the `auto_indent` setting per language to avoid repeated
            // settings lookups for consecutive edits in the same language.
            let mut previous_setting = None;
            let entries: Vec<_> = edits
                .into_iter()
                .enumerate()
                .zip(&edit_operation.as_edit().unwrap().new_text)
                .filter(|((_, (range, _)), _)| {
                    let language = before_edit.language_at(range.start);
                    let language_id = language.map(|l| l.id());
                    if let Some((cached_language_id, auto_indent)) = previous_setting
                        && cached_language_id == language_id
                    {
                        auto_indent
                    } else {
                        // The auto-indent setting is not present in editorconfigs, hence
                        // we can avoid passing the file here.
                        let auto_indent =
                            language_settings(language.map(|l| l.name()), None, cx).auto_indent;
                        previous_setting = Some((language_id, auto_indent));
                        auto_indent
                    }
                })
                .map(|((ix, (range, _)), new_text)| {
                    let new_text_length = new_text.len();
                    let old_start = range.start.to_point(&before_edit);
                    let new_start = (delta + range.start as isize) as usize;
                    let range_len = range.end - range.start;
                    delta += new_text_length as isize - range_len as isize;

                    // Decide what range of the insertion to auto-indent, and whether
                    // the first line of the insertion should be considered a newly-inserted line
                    // or an edit to an existing line.
                    let mut range_of_insertion_to_indent = 0..new_text_length;
                    let mut first_line_is_new = true;

                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
                    let old_line_end = before_edit.line_len(old_start.row);

                    // Insertions after existing (non-indent) content continue
                    // an existing line.
                    if old_start.column > old_line_start {
                        first_line_is_new = false;
                    }

                    if !new_text.contains('\n')
                        && (old_start.column + (range_len as u32) < old_line_end
                            || old_line_end == old_line_start)
                    {
                        first_line_is_new = false;
                    }

                    // When inserting text starting with a newline, avoid auto-indenting the
                    // previous line.
                    if new_text.starts_with('\n') {
                        range_of_insertion_to_indent.start += 1;
                        first_line_is_new = true;
                    }

                    // In block mode, record the indentation the inserted text
                    // originally had, so it can be re-based onto the target line.
                    let mut original_indent_column = None;
                    if let AutoindentMode::Block {
                        original_indent_columns,
                    } = &mode
                    {
                        original_indent_column = Some(if new_text.starts_with('\n') {
                            indent_size_for_text(
                                new_text[range_of_insertion_to_indent.clone()].chars(),
                            )
                            .len
                        } else {
                            original_indent_columns
                                .get(ix)
                                .copied()
                                .flatten()
                                .unwrap_or_else(|| {
                                    indent_size_for_text(
                                        new_text[range_of_insertion_to_indent.clone()].chars(),
                                    )
                                    .len
                                })
                        });

                        // Avoid auto-indenting the line after the edit.
                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
                            range_of_insertion_to_indent.end -= 1;
                        }
                    }

                    AutoindentRequestEntry {
                        first_line_is_new,
                        original_indent_column,
                        indent_size: before_edit.language_indent_size_at(range.start, cx),
                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
                    }
                })
                .collect();

            if !entries.is_empty() {
                self.autoindent_requests.push(Arc::new(AutoindentRequest {
                    before_edit,
                    entries,
                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
                    ignore_empty_lines: false,
                }));
            }
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(edit_operation), true, cx);
        Some(edit_id)
    }
2502
    /// Common post-edit bookkeeping: sets change bits, reparses, and emits
    /// `Edited`/`DirtyChanged` events if the buffer actually changed since
    /// `old_version`.
    fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
        self.was_changed();

        // No-op if the version advanced without producing any visible edits.
        if self.edits_since::<usize>(old_version).next().is_none() {
            return;
        }

        self.reparse(cx);
        cx.emit(BufferEvent::Edited);
        if was_dirty != self.is_dirty() {
            cx.emit(BufferEvent::DirtyChanged);
        }
        cx.notify();
    }
2517
    /// Requests auto-indentation of the given ranges without performing an
    /// edit, treating every line as newly-inserted and skipping empty lines.
    pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
    where
        I: IntoIterator<Item = Range<T>>,
        T: ToOffset + Copy,
    {
        let before_edit = self.snapshot();
        let entries = ranges
            .into_iter()
            .map(|range| AutoindentRequestEntry {
                range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
                first_line_is_new: true,
                indent_size: before_edit.language_indent_size_at(range.start, cx),
                original_indent_column: None,
            })
            .collect();
        self.autoindent_requests.push(Arc::new(AutoindentRequest {
            before_edit,
            entries,
            is_block_mode: false,
            ignore_empty_lines: true,
        }));
        self.request_autoindent(cx);
    }
2541
    // Inserts newlines at the given position to create an empty line, returning the start of the new line.
    // You can also request the insertion of empty lines above and below the line starting at the returned point.
    pub fn insert_empty_line(
        &mut self,
        position: impl ToPoint,
        space_above: bool,
        space_below: bool,
        cx: &mut Context<Self>,
    ) -> Point {
        let mut position = position.to_point(self);

        // Group all of the inserted newlines into a single undoable transaction.
        self.start_transaction();

        self.edit(
            [(position..position, "\n")],
            Some(AutoindentMode::EachLine),
            cx,
        );

        // If the position was mid-line, the newline split the line and the
        // empty line starts on the following row.
        if position.column > 0 {
            position += Point::new(1, 0);
        }

        // Ensure the target line itself is blank.
        if !self.is_line_blank(position.row) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        // Optionally ensure a blank line above the target line.
        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
            position.row += 1;
        }

        // Optionally ensure a blank line below the target line.
        if space_below
            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
        {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        self.end_transaction(cx);

        position
    }
2596
    /// Applies the given remote operations to the buffer.
    ///
    /// Text operations are forwarded to the underlying text buffer; other
    /// operations are applied immediately when possible, or deferred until
    /// their dependencies arrive.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        // Split buffer (text) operations from the rest, applying or deferring
        // the non-text ones along the way.
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        // Re-broadcast remote text operations as non-local.
        for operation in buffer_ops.iter() {
            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
        }
        self.text
            .apply_ops(buffer_ops, Some(cx.background_executor()));
        self.deferred_ops.insert(deferred_ops);
        // Applying the text ops may have unblocked previously-deferred ops.
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
    }
2629
    /// Attempts to apply all previously-deferred operations, re-deferring any
    /// whose dependencies still haven't arrived.
    fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op, cx);
            } else {
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }

    /// Returns whether any operations (text or non-text) are still deferred.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
    }
2645
    /// Returns whether a non-text operation's anchors can all be resolved
    /// against the operations received so far.
    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            // Text operations are handled by the underlying text buffer.
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                diagnostics: diagnostic_set,
                ..
            } => diagnostic_set.iter().all(|diagnostic| {
                self.text.can_resolve(&diagnostic.range.start)
                    && self.text.can_resolve(&diagnostic.range.end)
            }),
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            // These carry no anchors, so they can always be applied.
            Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
        }
    }
2664
    /// Applies a single non-text operation, observing its lamport timestamp so
    /// the local clock stays ahead of everything it has seen.
    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                server_id,
                diagnostics: diagnostic_set,
                lamport_timestamp,
            } => {
                let snapshot = self.snapshot();
                self.apply_diagnostic_update(
                    server_id,
                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
                    lamport_timestamp,
                    cx,
                );
            }
            Operation::UpdateSelections {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            } => {
                // Ignore out-of-date updates: keep only the newest selection
                // set per replica.
                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
                    && set.lamport_timestamp > lamport_timestamp
                {
                    return;
                }

                self.remote_selections.insert(
                    lamport_timestamp.replica_id,
                    SelectionSet {
                        selections,
                        lamport_timestamp,
                        line_mode,
                        cursor_shape,
                    },
                );
                self.text.lamport_clock.observe(lamport_timestamp);
                self.non_text_state_update_count += 1;
            }
            Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
                server_id,
            } => {
                if triggers.is_empty() {
                    // Server cleared its triggers: rebuild the aggregate set
                    // from the remaining servers.
                    self.completion_triggers_per_language_server
                        .remove(&server_id);
                    self.completion_triggers = self
                        .completion_triggers_per_language_server
                        .values()
                        .flat_map(|triggers| triggers.iter().cloned())
                        .collect();
                } else {
                    self.completion_triggers_per_language_server
                        .insert(server_id, triggers.iter().cloned().collect());
                    self.completion_triggers.extend(triggers);
                }
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            } => {
                self.text.set_line_ending(line_ending);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
2736
    /// Replaces the stored diagnostics for one language server, ignoring the
    /// update if it is older than the most recently applied diagnostics.
    fn apply_diagnostic_update(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        lamport_timestamp: clock::Lamport,
        cx: &mut Context<Self>,
    ) {
        if lamport_timestamp > self.diagnostics_timestamp {
            // `self.diagnostics` is a Vec of (server_id, set) kept sorted by
            // server id, so we can binary-search for the slot.
            let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
            if diagnostics.is_empty() {
                // An empty set removes the server's entry entirely.
                if let Ok(ix) = ix {
                    self.diagnostics.remove(ix);
                }
            } else {
                match ix {
                    Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
                    Ok(ix) => self.diagnostics[ix].1 = diagnostics,
                };
            }
            self.diagnostics_timestamp = lamport_timestamp;
            self.non_text_state_update_count += 1;
            self.text.lamport_clock.observe(lamport_timestamp);
            cx.notify();
            cx.emit(BufferEvent::DiagnosticsUpdated);
        }
    }
2763
    /// Emits an operation event so that observers can replicate it; `is_local`
    /// indicates whether the operation originated on this replica.
    fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
        self.was_changed();
        cx.emit(BufferEvent::Operation {
            operation,
            is_local,
        });
    }

    /// Removes the selections for a given peer.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }
2777
    /// Undoes the most recent transaction, returning its id if anything was undone.
    pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.undo() {
            // Broadcast the undo so other replicas converge.
            self.send_operation(Operation::Buffer(operation), true, cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }
2791
    /// Manually undoes a specific transaction in the buffer's undo history.
    ///
    /// Returns `true` if the transaction was found and undone.
    pub fn undo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut Context<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        if let Some(operation) = self.text.undo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), true, cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }

    /// Manually undoes all changes after a given transaction in the buffer's undo history.
    ///
    /// Returns `true` if any transactions were undone.
    pub fn undo_to_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut Context<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let operations = self.text.undo_to_transaction(transaction_id);
        let undone = !operations.is_empty();
        for operation in operations {
            self.send_operation(Operation::Buffer(operation), true, cx);
        }
        if undone {
            self.did_edit(&old_version, was_dirty, cx)
        }
        undone
    }
2828
    /// Undoes specific operations by id, applying each the given number of times.
    pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
        let was_dirty = self.is_dirty();
        let operation = self.text.undo_operations(counts);
        let old_version = self.version.clone();
        self.send_operation(Operation::Buffer(operation), true, cx);
        self.did_edit(&old_version, was_dirty, cx);
    }
2836
    /// Redoes the most recently undone transaction, returning its id if
    /// anything was redone.
    pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.redo() {
            // Broadcast the redo so other replicas converge.
            self.send_operation(Operation::Buffer(operation), true, cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
    ///
    /// Returns `true` if any transactions were redone.
    pub fn redo_to_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut Context<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let operations = self.text.redo_to_transaction(transaction_id);
        let redone = !operations.is_empty();
        for operation in operations {
            self.send_operation(Operation::Buffer(operation), true, cx);
        }
        if redone {
            self.did_edit(&old_version, was_dirty, cx)
        }
        redone
    }
2870
    /// Override current completion triggers with the user-provided completion triggers.
    ///
    /// An empty set removes the server's triggers and rebuilds the aggregate
    /// trigger set from the remaining servers. The update is broadcast to
    /// other replicas.
    pub fn set_completion_triggers(
        &mut self,
        server_id: LanguageServerId,
        triggers: BTreeSet<String>,
        cx: &mut Context<Self>,
    ) {
        self.completion_triggers_timestamp = self.text.lamport_clock.tick();
        if triggers.is_empty() {
            self.completion_triggers_per_language_server
                .remove(&server_id);
            self.completion_triggers = self
                .completion_triggers_per_language_server
                .values()
                .flat_map(|triggers| triggers.iter().cloned())
                .collect();
        } else {
            self.completion_triggers_per_language_server
                .insert(server_id, triggers.clone());
            self.completion_triggers.extend(triggers.iter().cloned());
        }
        self.send_operation(
            Operation::UpdateCompletionTriggers {
                triggers: triggers.into_iter().collect(),
                lamport_timestamp: self.completion_triggers_timestamp,
                server_id,
            },
            true,
            cx,
        );
        cx.notify();
    }
2903
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
    pub fn completion_triggers(&self) -> &BTreeSet<String> {
        &self.completion_triggers
    }

    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `should_dismiss_preview`
    /// to return false until there are additional edits.
    pub fn refresh_preview(&mut self) {
        self.preview_version = self.version.clone();
    }

    /// Whether we should preserve the preview status of a tab containing this buffer.
    pub fn preserve_preview(&self) -> bool {
        !self.has_edits_since(&self.preview_version)
    }
2921}
2922
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
/// Test-support helpers: marked-text edits and randomized editing / undo-redo
/// used by fuzz-style tests.
impl Buffer {
    /// Applies the edits described by `marked_string` (marked-text format) to the buffer.
    pub fn edit_via_marked_text(
        &mut self,
        marked_string: &str,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits, autoindent_mode, cx);
    }

    /// Overrides the interval within which consecutive edits are grouped into a
    /// single undo step.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.text.set_group_interval(group_interval);
    }

    /// Performs up to `old_range_count` random, non-overlapping edits on the buffer.
    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut last_end = None;
        for _ in 0..old_range_count {
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }

            // Ranges are generated left-to-right, each starting past the previous one.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let mut range = self.random_byte_range(new_start, rng);
            // Occasionally emit a reversed (end..start) range to exercise edit normalization.
            if rng.random_bool(0.2) {
                mem::swap(&mut range.start, &mut range.end);
            }
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
            new_text = new_text.to_uppercase();

            edits.push((range, new_text));
        }
        log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
        self.edit(edits, None, cx);
    }

    /// Randomly undoes/redoes operations and broadcasts the resulting operations.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let ops = self.text.randomly_undo_redo(rng);
        if !ops.is_empty() {
            for op in ops {
                self.send_operation(Operation::Buffer(op), true, cx);
                self.did_edit(&old_version, was_dirty, cx);
            }
        }
    }
}
2981
// `Buffer` broadcasts `BufferEvent`s to its observers via gpui's event system.
impl EventEmitter<BufferEvent> for Buffer {}
2983
/// Dereferencing a `Buffer` exposes the underlying [`TextBuffer`] API directly.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
2991
2992impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    ///
    /// Delegates to the free function of the same name.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }
2998
2999 /// Returns [`IndentSize`] for a given position that respects user settings
3000 /// and language preferences.
3001 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3002 let settings = language_settings(
3003 self.language_at(position).map(|l| l.name()),
3004 self.file(),
3005 cx,
3006 );
3007 if settings.hard_tabs {
3008 IndentSize::tab()
3009 } else {
3010 IndentSize::spaces(settings.tab_size.get())
3011 }
3012 }
3013
3014 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3015 /// is passed in as `single_indent_size`.
3016 pub fn suggested_indents(
3017 &self,
3018 rows: impl Iterator<Item = u32>,
3019 single_indent_size: IndentSize,
3020 ) -> BTreeMap<u32, IndentSize> {
3021 let mut result = BTreeMap::new();
3022
3023 for row_range in contiguous_ranges(rows, 10) {
3024 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3025 Some(suggestions) => suggestions,
3026 _ => break,
3027 };
3028
3029 for (row, suggestion) in row_range.zip(suggestions) {
3030 let indent_size = if let Some(suggestion) = suggestion {
3031 result
3032 .get(&suggestion.basis_row)
3033 .copied()
3034 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3035 .with_delta(suggestion.delta, single_indent_size)
3036 } else {
3037 self.indent_size_for_line(row)
3038 };
3039
3040 result.insert(row, indent_size);
3041 }
3042 }
3043
3044 result
3045 }
3046
    /// Produces one optional [`IndentSuggestion`] per row in `row_range`, combining
    /// tree-sitter indent queries, syntax-error regions, and regex-based indent rules
    /// from the language config. Returns `None` when the buffer has no language.
    fn suggest_autoindents(
        &self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
        let config = &self.language.as_ref()?.config;
        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

        // A `@start`-style capture with an associated suffix, used by the
        // regex-based outdent rules below.
        #[derive(Debug, Clone)]
        struct StartPosition {
            start: Point,
            suffix: SharedString,
        }

        // Find the suggested indentation ranges based on the syntax tree.
        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
        let end = Point::new(row_range.end, 0);
        let range = (start..end).to_offset(&self.text);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            Some(&grammar.indents_config.as_ref()?.query)
        });
        let indent_configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.indents_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        let mut indent_ranges = Vec::<Range<Point>>::new();
        let mut start_positions = Vec::<StartPosition>::new();
        let mut outdent_positions = Vec::<Point>::new();
        while let Some(mat) = matches.peek() {
            let mut start: Option<Point> = None;
            let mut end: Option<Point> = None;

            let config = indent_configs[mat.grammar_index];
            for capture in mat.captures {
                if capture.index == config.indent_capture_ix {
                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.start_capture_ix {
                    start = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.end_capture_ix {
                    end = Some(Point::from_ts_point(capture.node.start_position()));
                } else if Some(capture.index) == config.outdent_capture_ix {
                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
                    start_positions.push(StartPosition {
                        start: Point::from_ts_point(capture.node.start_position()),
                        suffix: suffix.clone(),
                    });
                }
            }

            matches.advance();
            // Single-row indent ranges are ignored; overlapping ranges with the
            // same start are merged, keeping the furthest end.
            if let Some((start, end)) = start.zip(end) {
                if start.row == end.row {
                    continue;
                }
                let range = start..end;
                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
                    Err(ix) => indent_ranges.insert(ix, range),
                    Ok(ix) => {
                        let prev_range = &mut indent_ranges[ix];
                        prev_range.end = prev_range.end.max(range.end);
                    }
                }
            }
        }

        // Collect syntax-error regions; suggestions inside them are flagged so
        // callers can treat them with less confidence.
        let mut error_ranges = Vec::<Range<Point>>::new();
        let mut matches = self
            .syntax
            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
        while let Some(mat) = matches.peek() {
            let node = mat.captures[0].node;
            let start = Point::from_ts_point(node.start_position());
            let end = Point::from_ts_point(node.end_position());
            let range = start..end;
            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
                Ok(ix) | Err(ix) => ix,
            };
            let mut end_ix = ix;
            // Coalesce overlapping error ranges into a single entry.
            while let Some(existing_range) = error_ranges.get(end_ix) {
                if existing_range.end < end {
                    end_ix += 1;
                } else {
                    break;
                }
            }
            error_ranges.splice(ix..end_ix, [range]);
            matches.advance();
        }

        outdent_positions.sort();
        for outdent_position in outdent_positions {
            // find the innermost indent range containing this outdent_position
            // set its end to the outdent position
            if let Some(range_to_truncate) = indent_ranges
                .iter_mut()
                .filter(|indent_range| indent_range.contains(&outdent_position))
                .next_back()
            {
                range_to_truncate.end = outdent_position;
            }
        }

        start_positions.sort_by_key(|b| b.start);

        // Find the suggested indentation increases and decreased based on regexes.
        let mut regex_outdent_map = HashMap::default();
        let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
        let mut start_positions_iter = start_positions.iter().peekable();

        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
        self.for_each_line(
            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
                ..Point::new(row_range.end, 0),
            |row, line| {
                if config
                    .decrease_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row, Ordering::Less));
                }
                if config
                    .increase_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row + 1, Ordering::Greater));
                }
                // Record start positions seen on earlier rows, keyed by suffix,
                // for the `valid_after` checks below.
                while let Some(pos) = start_positions_iter.peek() {
                    if pos.start.row < row {
                        let pos = start_positions_iter.next().unwrap();
                        last_seen_suffix
                            .entry(pos.suffix.to_string())
                            .or_default()
                            .push(pos.start);
                    } else {
                        break;
                    }
                }
                for rule in &config.decrease_indent_patterns {
                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
                        let row_start_column = self.indent_size_for_line(row).len;
                        let basis_row = rule
                            .valid_after
                            .iter()
                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
                            .flatten()
                            .filter(|start_point| start_point.column <= row_start_column)
                            .max_by_key(|start_point| start_point.row);
                        if let Some(outdent_to_row) = basis_row {
                            regex_outdent_map.insert(row, outdent_to_row.row);
                        }
                        break;
                    }
                }
            },
        );

        let mut indent_changes = indent_change_rows.into_iter().peekable();
        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
            prev_non_blank_row.unwrap_or(0)
        } else {
            row_range.start.saturating_sub(1)
        };

        // Produce a lazy iterator of suggestions, one per requested row, carrying
        // the previous row's state forward between iterations.
        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
        Some(row_range.map(move |row| {
            let row_start = Point::new(row, self.indent_size_for_line(row).len);

            let mut indent_from_prev_row = false;
            let mut outdent_from_prev_row = false;
            let mut outdent_to_row = u32::MAX;
            let mut from_regex = false;

            // Consume regex-driven indent changes up to and including this row.
            while let Some((indent_row, delta)) = indent_changes.peek() {
                match indent_row.cmp(&row) {
                    Ordering::Equal => match delta {
                        Ordering::Less => {
                            from_regex = true;
                            outdent_from_prev_row = true
                        }
                        Ordering::Greater => {
                            indent_from_prev_row = true;
                            from_regex = true
                        }
                        _ => {}
                    },

                    Ordering::Greater => break,
                    Ordering::Less => {}
                }

                indent_changes.next();
            }

            for range in &indent_ranges {
                if range.start.row >= row {
                    break;
                }
                if range.start.row == prev_row && range.end > row_start {
                    indent_from_prev_row = true;
                }
                if range.end > prev_row_start && range.end <= row_start {
                    outdent_to_row = outdent_to_row.min(range.start.row);
                }
            }

            // Regex-based outdents override syntax-based indent decisions.
            if let Some(basis_row) = regex_outdent_map.get(&row) {
                indent_from_prev_row = false;
                outdent_to_row = *basis_row;
                from_regex = true;
            }

            let within_error = error_ranges
                .iter()
                .any(|e| e.start.row < row && e.end > row_start);

            let suggestion = if outdent_to_row == prev_row
                || (outdent_from_prev_row && indent_from_prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if indent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Greater,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_to_row < prev_row {
                Some(IndentSuggestion {
                    basis_row: outdent_to_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Less,
                    within_error: within_error && !from_regex,
                })
            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else {
                None
            };

            prev_row = row;
            prev_row_start = row_start;
            suggestion
        }))
    }
3309
3310 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3311 while row > 0 {
3312 row -= 1;
3313 if !self.is_line_blank(row) {
3314 return Some(row);
3315 }
3316 }
3317 None
3318 }
3319
3320 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3321 let captures = self.syntax.captures(range, &self.text, |grammar| {
3322 grammar
3323 .highlights_config
3324 .as_ref()
3325 .map(|config| &config.query)
3326 });
3327 let highlight_maps = captures
3328 .grammars()
3329 .iter()
3330 .map(|grammar| grammar.highlight_map())
3331 .collect();
3332 (captures, highlight_maps)
3333 }
3334
3335 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3336 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3337 /// returned in chunks where each chunk has a single syntax highlighting style and
3338 /// diagnostic status.
3339 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3340 let range = range.start.to_offset(self)..range.end.to_offset(self);
3341
3342 let mut syntax = None;
3343 if language_aware {
3344 syntax = Some(self.get_highlights(range.clone()));
3345 }
3346 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3347 let diagnostics = language_aware;
3348 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3349 }
3350
    /// Returns syntax-highlighted text for the given range, optionally overriding
    /// the computed styles with `override_style`.
    pub fn highlighted_text_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> HighlightedText {
        HighlightedText::from_buffer_range(
            range,
            &self.text,
            &self.syntax,
            override_style,
            syntax_theme,
        )
    }
3365
3366 /// Invokes the given callback for each line of text in the given range of the buffer.
3367 /// Uses callback to avoid allocating a string for each line.
3368 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3369 let mut line = String::new();
3370 let mut row = range.start.row;
3371 for chunk in self
3372 .as_rope()
3373 .chunks_in_range(range.to_offset(self))
3374 .chain(["\n"])
3375 {
3376 for (newline_ix, text) in chunk.split('\n').enumerate() {
3377 if newline_ix > 0 {
3378 callback(row, &line);
3379 row += 1;
3380 line.clear();
3381 }
3382 line.push_str(text);
3383 }
3384 }
3385 }
3386
    /// Iterates over every [`SyntaxLayer`] in the buffer.
    ///
    /// Covers the whole buffer and includes hidden layers.
    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax_layers_for_range(0..self.len(), true)
    }
3391
3392 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3393 let offset = position.to_offset(self);
3394 self.syntax_layers_for_range(offset..offset, false)
3395 .filter(|l| l.node().end_byte() > offset)
3396 .last()
3397 }
3398
    /// Iterates over the syntax layers intersecting `range`; `include_hidden`
    /// controls whether hidden layers are yielded.
    pub fn syntax_layers_for_range<D: ToOffset>(
        &self,
        range: Range<D>,
        include_hidden: bool,
    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax
            .layers_for_range(range, &self.text, include_hidden)
    }
3407
3408 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3409 &self,
3410 range: Range<D>,
3411 ) -> Option<SyntaxLayer<'_>> {
3412 let range = range.to_offset(self);
3413 self.syntax
3414 .layers_for_range(range, &self.text, false)
3415 .max_by(|a, b| {
3416 if a.depth != b.depth {
3417 a.depth.cmp(&b.depth)
3418 } else if a.offset.0 != b.offset.0 {
3419 a.offset.0.cmp(&b.offset.0)
3420 } else {
3421 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3422 }
3423 })
3424 }
3425
    /// Returns the main [`Language`].
    ///
    /// This is the buffer's base language, ignoring nested syntax layers.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
3430
3431 /// Returns the [`Language`] at the given location.
3432 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3433 self.syntax_layer_at(position)
3434 .map(|info| info.language)
3435 .or(self.language.as_ref())
3436 }
3437
    /// Returns the settings for the language at the given location.
    ///
    /// Resolves the language via `language_at`, which falls back to the
    /// buffer's base language when no syntax layer covers the position.
    pub fn settings_at<'a, D: ToOffset>(
        &'a self,
        position: D,
        cx: &'a App,
    ) -> Cow<'a, LanguageSettings> {
        language_settings(
            self.language_at(position).map(|l| l.name()),
            self.file.as_ref(),
            cx,
        )
    }
3450
    /// Returns a [`CharClassifier`] configured with the language scope at `point`.
    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
        CharClassifier::new(self.language_scope_at(point))
    }
3454
    /// Returns the [`LanguageScope`] at the given location.
    ///
    /// Falls back to the buffer's base language when no syntax layer covers
    /// the position.
    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut scope = None;
        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self
            .syntax
            .layers_for_range(offset..offset, &self.text, false)
        {
            let mut cursor = layer.node().walk();

            // Descend to the smallest node in this layer that contains `offset`.
            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            // Deeper layers beat shallower ones; within the same depth the
            // smaller node wins.
            if let Some(range) = range
                && smallest_range_and_depth.as_ref().is_none_or(
                    |(smallest_range, smallest_range_depth)| {
                        if layer.depth > *smallest_range_depth {
                            true
                        } else if layer.depth == *smallest_range_depth {
                            range.len() < smallest_range.len()
                        } else {
                            false
                        }
                    },
                )
            {
                smallest_range_and_depth = Some((range, layer.depth));
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }
3509
3510 /// Returns a tuple of the range and character kind of the word
3511 /// surrounding the given position.
3512 pub fn surrounding_word<T: ToOffset>(
3513 &self,
3514 start: T,
3515 scope_context: Option<CharScopeContext>,
3516 ) -> (Range<usize>, Option<CharKind>) {
3517 let mut start = start.to_offset(self);
3518 let mut end = start;
3519 let mut next_chars = self.chars_at(start).take(128).peekable();
3520 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3521
3522 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3523 let word_kind = cmp::max(
3524 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3525 next_chars.peek().copied().map(|c| classifier.kind(c)),
3526 );
3527
3528 for ch in prev_chars {
3529 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3530 start -= ch.len_utf8();
3531 } else {
3532 break;
3533 }
3534 }
3535
3536 for ch in next_chars {
3537 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3538 end += ch.len_utf8();
3539 } else {
3540 break;
3541 }
3542 }
3543
3544 (start..end, word_kind)
3545 }
3546
    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
    fn goto_node_enclosing_range(
        cursor: &mut tree_sitter::TreeCursor,
        query_range: &Range<usize>,
        require_larger: bool,
    ) -> bool {
        let mut ascending = false;
        loop {
            let mut range = cursor.node().byte_range();
            if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the node the containing node.
                if range.start > query_range.start {
                    cursor.goto_previous_sibling();
                    range = cursor.node().byte_range();
                }
            } else {
                // When the query range is non-empty and the current node ends exactly at the start,
                // move to the next sibling to find a node that extends beyond the start.
                if range.end == query_range.start {
                    cursor.goto_next_sibling();
                    range = cursor.node().byte_range();
                }
            }

            let encloses = range.contains_inclusive(query_range)
                && (!require_larger || range.len() > query_range.len());
            if !encloses {
                // Current node doesn't qualify: climb toward the root and retry.
                ascending = true;
                if !cursor.goto_parent() {
                    return false;
                }
                continue;
            } else if ascending {
                // Once we've started ascending, the first enclosing node is the answer.
                return true;
            }

            // Descend into the current node.
            if cursor
                .goto_first_child_for_byte(query_range.start)
                .is_none()
            {
                return true;
            }
        }
    }
3597
    /// Returns the smallest syntax node that encloses `range` and is strictly
    /// larger than it, considering all syntax layers (including hidden ones).
    /// When several layers yield a candidate, the smallest candidate wins.
    pub fn syntax_ancestor<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;
        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that both contains the range and is larger than it.
            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
                continue;
            }

            let left_node = cursor.node();
            let mut layer_result = left_node;

            // For an empty range, try to find another node immediately to the right of the range.
            if left_node.end_byte() == range.start {
                let mut right_node = None;
                // Step to the next sibling, climbing out of exhausted subtrees as needed.
                while !cursor.goto_next_sibling() {
                    if !cursor.goto_parent() {
                        break;
                    }
                }

                // Descend along the leftmost edge of nodes starting exactly at `range.start`.
                while cursor.node().start_byte() == range.start {
                    right_node = Some(cursor.node());
                    if !cursor.goto_first_child() {
                        break;
                    }
                }

                // If there is a candidate node on both sides of the (empty) range, then
                // decide between the two by favoring a named node over an anonymous token.
                // If both nodes are the same in that regard, favor the right one.
                if let Some(right_node) = right_node
                    && (right_node.is_named() || !left_node.is_named())
                {
                    layer_result = right_node;
                }
            }

            // Across layers, keep the smallest candidate node.
            if let Some(previous_result) = &result
                && previous_result.byte_range().len() < layer_result.byte_range().len()
            {
                continue;
            }
            result = Some(layer_result);
        }

        result
    }
3654
    /// Find the previous sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that precedes the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the previous sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no previous sibling at any ancestor level.
    pub fn syntax_prev_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the previous sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_previous_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, keep the candidate that ends earliest.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().end < layer_result.byte_range().end {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3705
    /// Find the next sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that follows the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the next sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no next sibling at any ancestor level.
    pub fn syntax_next_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the next sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_next_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, keep the candidate that starts latest.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().start > layer_result.byte_range().start {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
3756
    /// Returns the root syntax node within the given row
    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
        let start_offset = position.to_offset(self);

        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;

        // Only the first (outermost) layer covering the position is considered.
        let layer = self
            .syntax
            .layers_for_range(start_offset..start_offset, &self.text, true)
            .next()?;

        let mut cursor = layer.node().walk();

        // Descend to the first leaf that touches the start of the range.
        while cursor.goto_first_child_for_byte(start_offset).is_some() {
            // Skip a leaf that ends exactly at the offset; the following sibling
            // is the node actually touching the position.
            if cursor.node().end_byte() == start_offset {
                cursor.goto_next_sibling();
            }
        }

        // Ascend to the root node within the same row.
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
        }

        Some(cursor.node())
    }
3786
    /// Returns the outline for the buffer.
    ///
    /// This method allows passing an optional [`SyntaxTheme`] to
    /// syntax-highlight the returned symbols.
    pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
        // Covers the entire buffer; `true` includes extra context in item text.
        Outline::new(self.outline_items_containing(0..self.len(), true, theme))
    }
3794
3795 /// Returns all the symbols that contain the given position.
3796 ///
3797 /// This method allows passing an optional [`SyntaxTheme`] to
3798 /// syntax-highlight the returned symbols.
3799 pub fn symbols_containing<T: ToOffset>(
3800 &self,
3801 position: T,
3802 theme: Option<&SyntaxTheme>,
3803 ) -> Vec<OutlineItem<Anchor>> {
3804 let position = position.to_offset(self);
3805 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3806 let end = self.clip_offset(position + 1, Bias::Right);
3807 let mut items = self.outline_items_containing(start..end, false, theme);
3808 let mut prev_depth = None;
3809 items.retain(|item| {
3810 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3811 prev_depth = Some(item.depth);
3812 result
3813 });
3814 items
3815 }
3816
    /// Returns the point range of the first outline item (in match order)
    /// whose node intersects `range`, if any.
    pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });
        let configs = matches
            .grammars()
            .iter()
            .map(|g| g.outline_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        while let Some(mat) = matches.peek() {
            let config = &configs[mat.grammar_index];
            // `maybe!` yields the item node only when the match has an item
            // capture whose byte range intersects the query range.
            let containing_item_node = maybe!({
                let item_node = mat.captures.iter().find_map(|cap| {
                    if cap.index == config.item_capture_ix {
                        Some(cap.node)
                    } else {
                        None
                    }
                })?;

                let item_byte_range = item_node.byte_range();
                if item_byte_range.end < range.start || item_byte_range.start > range.end {
                    None
                } else {
                    Some(item_node)
                }
            });

            if let Some(item_node) = containing_item_node {
                return Some(
                    Point::from_ts_point(item_node.start_position())
                        ..Point::from_ts_point(item_node.end_position()),
                );
            }

            matches.advance();
        }
        None
    }
3858
    /// Returns the outline items intersecting `range`, with item ranges
    /// expressed as [`Anchor`]s.
    pub fn outline_items_containing<T: ToOffset>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Vec<OutlineItem<Anchor>> {
        self.outline_items_containing_internal(
            range,
            include_extra_context,
            theme,
            |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
        )
    }
3872
    /// Like [`BufferSnapshot::outline_items_containing`], but leaves item
    /// ranges expressed as [`Point`]s.
    pub fn outline_items_as_points_containing<T: ToOffset>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Vec<OutlineItem<Point>> {
        self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
            range
        })
    }
3883
    /// Shared implementation for the outline-item queries: collects outline
    /// items and annotation (e.g. doc-comment) row ranges from the outline
    /// query, assigns nesting depths, and converts ranges via `range_callback`.
    fn outline_items_containing_internal<T: ToOffset, U>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
        range_callback: fn(&Self, Range<Point>) -> Range<U>,
    ) -> Vec<OutlineItem<U>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });

        let mut items = Vec::new();
        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
        while let Some(mat) = matches.peek() {
            let config = matches.grammars()[mat.grammar_index]
                .outline_config
                .as_ref()
                .unwrap();
            if let Some(item) =
                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
            {
                items.push(item);
            } else if let Some(capture) = mat
                .captures
                .iter()
                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
            {
                let capture_range = capture.node.start_position()..capture.node.end_position();
                let mut capture_row_range =
                    capture_range.start.row as u32..capture_range.end.row as u32;
                // A capture ending at column 0 doesn't actually occupy its end row.
                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
                {
                    capture_row_range.end -= 1;
                }
                // Merge annotation ranges that touch or overlap the previous one.
                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
                        last_row_range.end = capture_row_range.end;
                    } else {
                        annotation_row_ranges.push(capture_row_range);
                    }
                } else {
                    annotation_row_ranges.push(capture_row_range);
                }
            }
            matches.advance();
        }

        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));

        // Assign depths based on containment relationships and convert to anchors.
        let mut item_ends_stack = Vec::<Point>::new();
        let mut anchor_items = Vec::new();
        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
        for item in items {
            // Pop ancestors that no longer contain this item; the remaining
            // stack depth is the item's nesting depth.
            while let Some(last_end) = item_ends_stack.last().copied() {
                if last_end < item.range.end {
                    item_ends_stack.pop();
                } else {
                    break;
                }
            }

            // An annotation is attached to an item only when it ends on the
            // row immediately preceding the item.
            let mut annotation_row_range = None;
            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
                let row_preceding_item = item.range.start.row.saturating_sub(1);
                if next_annotation_row_range.end < row_preceding_item {
                    annotation_row_ranges.next();
                } else {
                    if next_annotation_row_range.end == row_preceding_item {
                        annotation_row_range = Some(next_annotation_row_range.clone());
                        annotation_row_ranges.next();
                    }
                    break;
                }
            }

            anchor_items.push(OutlineItem {
                depth: item_ends_stack.len(),
                range: range_callback(self, item.range.clone()),
                source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
                text: item.text,
                highlight_ranges: item.highlight_ranges,
                name_ranges: item.name_ranges,
                body_range: item.body_range.map(|r| range_callback(self, r)),
                annotation_range: annotation_row_range.map(|annotation_range| {
                    let point_range = Point::new(annotation_range.start, 0)
                        ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
                    range_callback(self, point_range)
                }),
            });
            item_ends_stack.push(item.range.end);
        }

        anchor_items
    }
3980
    /// Builds a single [`OutlineItem`] from one outline query match.
    ///
    /// Returns `None` when the match has no item capture, the item lies
    /// entirely outside `range`, or no name/context captures contribute any
    /// text. The returned item's `depth` is a placeholder; it is computed by
    /// the caller.
    fn next_outline_item(
        &self,
        config: &OutlineConfig,
        mat: &SyntaxMapMatch,
        range: &Range<usize>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Option<OutlineItem<Point>> {
        let item_node = mat.captures.iter().find_map(|cap| {
            if cap.index == config.item_capture_ix {
                Some(cap.node)
            } else {
                None
            }
        })?;

        // Skip items that don't intersect the requested range.
        let item_byte_range = item_node.byte_range();
        if item_byte_range.end < range.start || item_byte_range.start > range.end {
            return None;
        }
        let item_point_range = Point::from_ts_point(item_node.start_position())
            ..Point::from_ts_point(item_node.end_position());

        let mut open_point = None;
        let mut close_point = None;

        // Byte ranges contributing to the display text, each flagged with
        // whether it belongs to the item's name.
        let mut buffer_ranges = Vec::new();
        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
            let mut range = node.start_byte()..node.end_byte();
            let start = node.start_position();
            // Truncate multi-line captures to their first line.
            if node.end_position().row > start.row {
                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
            }

            if !range.is_empty() {
                buffer_ranges.push((range, node_is_name));
            }
        };

        for capture in mat.captures {
            if capture.index == config.name_capture_ix {
                add_to_buffer_ranges(capture.node, true);
            } else if Some(capture.index) == config.context_capture_ix
                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
            {
                add_to_buffer_ranges(capture.node, false);
            } else {
                // The open/close captures delimit the item's body.
                if Some(capture.index) == config.open_capture_ix {
                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.close_capture_ix {
                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
                }
            }
        }

        if buffer_ranges.is_empty() {
            return None;
        }
        let source_range_for_text =
            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;

        // Concatenate the captured ranges into the item's display text,
        // recording syntax-highlight spans and which spans form the name.
        let mut text = String::new();
        let mut highlight_ranges = Vec::new();
        let mut name_ranges = Vec::new();
        let mut chunks = self.chunks(source_range_for_text.clone(), true);
        let mut last_buffer_range_end = 0;
        for (buffer_range, is_name) in buffer_ranges {
            // Separate non-contiguous captures with a single space.
            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
            if space_added {
                text.push(' ');
            }
            let before_append_len = text.len();
            let mut offset = buffer_range.start;
            chunks.seek(buffer_range.clone());
            for mut chunk in chunks.by_ref() {
                // Clip the final chunk so we never copy past the capture's end.
                if chunk.text.len() > buffer_range.end - offset {
                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
                    offset = buffer_range.end;
                } else {
                    offset += chunk.text.len();
                }
                let style = chunk
                    .syntax_highlight_id
                    .zip(theme)
                    .and_then(|(highlight, theme)| highlight.style(theme));
                if let Some(style) = style {
                    let start = text.len();
                    let end = start + chunk.text.len();
                    highlight_ranges.push((start..end, style));
                }
                text.push_str(chunk.text);
                if offset >= buffer_range.end {
                    break;
                }
            }
            if is_name {
                let after_append_len = text.len();
                // Include the separating space when this continues an earlier
                // name span, so the name reads as one contiguous range.
                let start = if space_added && !name_ranges.is_empty() {
                    before_append_len - 1
                } else {
                    before_append_len
                };
                name_ranges.push(start..after_append_len);
            }
            last_buffer_range_end = buffer_range.end;
        }

        Some(OutlineItem {
            depth: 0, // We'll calculate the depth later
            range: item_point_range,
            source_range_for_text: source_range_for_text.to_point(self),
            text,
            highlight_ranges,
            name_ranges,
            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
            annotation_range: None,
        })
    }
4099
4100 pub fn function_body_fold_ranges<T: ToOffset>(
4101 &self,
4102 within: Range<T>,
4103 ) -> impl Iterator<Item = Range<usize>> + '_ {
4104 self.text_object_ranges(within, TreeSitterOptions::default())
4105 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4106 }
4107
4108 /// For each grammar in the language, runs the provided
4109 /// [`tree_sitter::Query`] against the given range.
4110 pub fn matches(
4111 &self,
4112 range: Range<usize>,
4113 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4114 ) -> SyntaxMapMatches<'_> {
4115 self.syntax.matches(range, self, query)
4116 }
4117
4118 pub fn all_bracket_ranges(
4119 &self,
4120 range: Range<usize>,
4121 ) -> impl Iterator<Item = BracketMatch> + '_ {
4122 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4123 grammar.brackets_config.as_ref().map(|c| &c.query)
4124 });
4125 let configs = matches
4126 .grammars()
4127 .iter()
4128 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4129 .collect::<Vec<_>>();
4130
4131 iter::from_fn(move || {
4132 while let Some(mat) = matches.peek() {
4133 let mut open = None;
4134 let mut close = None;
4135 let config = &configs[mat.grammar_index];
4136 let pattern = &config.patterns[mat.pattern_index];
4137 for capture in mat.captures {
4138 if capture.index == config.open_capture_ix {
4139 open = Some(capture.node.byte_range());
4140 } else if capture.index == config.close_capture_ix {
4141 close = Some(capture.node.byte_range());
4142 }
4143 }
4144
4145 matches.advance();
4146
4147 let Some((open_range, close_range)) = open.zip(close) else {
4148 continue;
4149 };
4150
4151 let bracket_range = open_range.start..=close_range.end;
4152 if !bracket_range.overlaps(&range) {
4153 continue;
4154 }
4155
4156 return Some(BracketMatch {
4157 open_range,
4158 close_range,
4159 newline_only: pattern.newline_only,
4160 });
4161 }
4162 None
4163 })
4164 }
4165
4166 /// Returns bracket range pairs overlapping or adjacent to `range`
4167 pub fn bracket_ranges<T: ToOffset>(
4168 &self,
4169 range: Range<T>,
4170 ) -> impl Iterator<Item = BracketMatch> + '_ {
4171 // Find bracket pairs that *inclusively* contain the given range.
4172 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4173 self.all_bracket_ranges(range)
4174 .filter(|pair| !pair.newline_only)
4175 }
4176
    /// Yields `(byte_range, DebuggerTextObject)` pairs for debugger-variables
    /// query captures overlapping the given range.
    ///
    /// Within a single match, captures mapping to the same
    /// [`DebuggerTextObject`] are coalesced into one range spanning all of them.
    ///
    /// NOTE(review): unlike `text_object_ranges`, the expanded end offset is
    /// not clamped to `self.len()` here — confirm whether `to_next_offset`
    /// already guarantees an in-bounds offset.
    pub fn debug_variables_query<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);

        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions::default(),
            |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
        );

        // Per-grammar configs, indexed by `mat.grammar_index`.
        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.debug_variables_config.as_ref())
            .collect::<Vec<_>>();

        // Captures accumulated from the current match, drained one per call.
        let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Drain pending captures, skipping those outside `range`.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    // Map the capture index to its object kind, if it has one.
                    let Some(ix) = config
                        .objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Coalesce with an existing capture of the same kind,
                    // growing its range to cover both.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4243
4244 pub fn text_object_ranges<T: ToOffset>(
4245 &self,
4246 range: Range<T>,
4247 options: TreeSitterOptions,
4248 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4249 let range =
4250 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4251
4252 let mut matches =
4253 self.syntax
4254 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4255 grammar.text_object_config.as_ref().map(|c| &c.query)
4256 });
4257
4258 let configs = matches
4259 .grammars()
4260 .iter()
4261 .map(|grammar| grammar.text_object_config.as_ref())
4262 .collect::<Vec<_>>();
4263
4264 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4265
4266 iter::from_fn(move || {
4267 loop {
4268 while let Some(capture) = captures.pop() {
4269 if capture.0.overlaps(&range) {
4270 return Some(capture);
4271 }
4272 }
4273
4274 let mat = matches.peek()?;
4275
4276 let Some(config) = configs[mat.grammar_index].as_ref() else {
4277 matches.advance();
4278 continue;
4279 };
4280
4281 for capture in mat.captures {
4282 let Some(ix) = config
4283 .text_objects_by_capture_ix
4284 .binary_search_by_key(&capture.index, |e| e.0)
4285 .ok()
4286 else {
4287 continue;
4288 };
4289 let text_object = config.text_objects_by_capture_ix[ix].1;
4290 let byte_range = capture.node.byte_range();
4291
4292 let mut found = false;
4293 for (range, existing) in captures.iter_mut() {
4294 if existing == &text_object {
4295 range.start = range.start.min(byte_range.start);
4296 range.end = range.end.max(byte_range.end);
4297 found = true;
4298 break;
4299 }
4300 }
4301
4302 if !found {
4303 captures.push((byte_range, text_object));
4304 }
4305 }
4306
4307 matches.advance();
4308 }
4309 })
4310 }
4311
4312 /// Returns enclosing bracket ranges containing the given range
4313 pub fn enclosing_bracket_ranges<T: ToOffset>(
4314 &self,
4315 range: Range<T>,
4316 ) -> impl Iterator<Item = BracketMatch> + '_ {
4317 let range = range.start.to_offset(self)..range.end.to_offset(self);
4318
4319 self.bracket_ranges(range.clone()).filter(move |pair| {
4320 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4321 })
4322 }
4323
4324 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4325 ///
4326 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4327 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4328 &self,
4329 range: Range<T>,
4330 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4331 ) -> Option<(Range<usize>, Range<usize>)> {
4332 let range = range.start.to_offset(self)..range.end.to_offset(self);
4333
4334 // Get the ranges of the innermost pair of brackets.
4335 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4336
4337 for pair in self.enclosing_bracket_ranges(range) {
4338 if let Some(range_filter) = range_filter
4339 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4340 {
4341 continue;
4342 }
4343
4344 let len = pair.close_range.end - pair.open_range.start;
4345
4346 if let Some((existing_open, existing_close)) = &result {
4347 let existing_len = existing_close.end - existing_open.start;
4348 if len > existing_len {
4349 continue;
4350 }
4351 }
4352
4353 result = Some((pair.open_range, pair.close_range));
4354 }
4355
4356 result
4357 }
4358
4359 /// Returns anchor ranges for any matches of the redaction query.
4360 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4361 /// will be run on the relevant section of the buffer.
4362 pub fn redacted_ranges<T: ToOffset>(
4363 &self,
4364 range: Range<T>,
4365 ) -> impl Iterator<Item = Range<usize>> + '_ {
4366 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4367 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4368 grammar
4369 .redactions_config
4370 .as_ref()
4371 .map(|config| &config.query)
4372 });
4373
4374 let configs = syntax_matches
4375 .grammars()
4376 .iter()
4377 .map(|grammar| grammar.redactions_config.as_ref())
4378 .collect::<Vec<_>>();
4379
4380 iter::from_fn(move || {
4381 let redacted_range = syntax_matches
4382 .peek()
4383 .and_then(|mat| {
4384 configs[mat.grammar_index].and_then(|config| {
4385 mat.captures
4386 .iter()
4387 .find(|capture| capture.index == config.redaction_capture_ix)
4388 })
4389 })
4390 .map(|mat| mat.node.byte_range());
4391 syntax_matches.advance();
4392 redacted_range
4393 })
4394 }
4395
4396 pub fn injections_intersecting_range<T: ToOffset>(
4397 &self,
4398 range: Range<T>,
4399 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4400 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4401
4402 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4403 grammar
4404 .injection_config
4405 .as_ref()
4406 .map(|config| &config.query)
4407 });
4408
4409 let configs = syntax_matches
4410 .grammars()
4411 .iter()
4412 .map(|grammar| grammar.injection_config.as_ref())
4413 .collect::<Vec<_>>();
4414
4415 iter::from_fn(move || {
4416 let ranges = syntax_matches.peek().and_then(|mat| {
4417 let config = &configs[mat.grammar_index]?;
4418 let content_capture_range = mat.captures.iter().find_map(|capture| {
4419 if capture.index == config.content_capture_ix {
4420 Some(capture.node.byte_range())
4421 } else {
4422 None
4423 }
4424 })?;
4425 let language = self.language_at(content_capture_range.start)?;
4426 Some((content_capture_range, language))
4427 });
4428 syntax_matches.advance();
4429 ranges
4430 })
4431 }
4432
    /// Yields a [`RunnableRange`] for each runnable query match intersecting
    /// `offset_range`.
    ///
    /// A match produces a runnable only when it contains a `Run` capture; its
    /// `full_range` spans every capture in the match, and each named extra
    /// capture is resolved to the buffer text it covers.
    pub fn runnable_ranges(
        &self,
        offset_range: Range<usize>,
    ) -> impl Iterator<Item = RunnableRange> + '_ {
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar.runnable_config.as_ref().map(|config| &config.query)
        });

        // Per-grammar runnable configs, indexed by `mat.grammar_index`.
        let test_configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.runnable_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            loop {
                let mat = syntax_matches.peek()?;

                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
                    let mut run_range = None;
                    // The smallest range covering every capture in the match.
                    let full_range = mat.captures.iter().fold(
                        Range {
                            start: usize::MAX,
                            end: 0,
                        },
                        |mut acc, next| {
                            let byte_range = next.node.byte_range();
                            if acc.start > byte_range.start {
                                acc.start = byte_range.start;
                            }
                            if acc.end < byte_range.end {
                                acc.end = byte_range.end;
                            }
                            acc
                        },
                    );
                    if full_range.start > full_range.end {
                        // We did not find a full spanning range of this match.
                        return None;
                    }
                    // Collect the named captures; as a side effect, note the
                    // `Run` capture's range when we encounter it.
                    let extra_captures: SmallVec<[_; 1]> =
                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
                            test_configs
                                .extra_captures
                                .get(capture.index as usize)
                                .cloned()
                                .and_then(|tag_name| match tag_name {
                                    RunnableCapture::Named(name) => {
                                        Some((capture.node.byte_range(), name))
                                    }
                                    RunnableCapture::Run => {
                                        let _ = run_range.insert(capture.node.byte_range());
                                        None
                                    }
                                })
                        }));
                    // No `Run` capture means this match isn't runnable.
                    let run_range = run_range?;
                    // Tags come from the pattern's query property settings
                    // with key "tag".
                    let tags = test_configs
                        .query
                        .property_settings(mat.pattern_index)
                        .iter()
                        .filter_map(|property| {
                            if *property.key == *"tag" {
                                property
                                    .value
                                    .as_ref()
                                    .map(|value| RunnableTag(value.to_string().into()))
                            } else {
                                None
                            }
                        })
                        .collect();
                    // Resolve each named capture to the text it covers.
                    let extra_captures = extra_captures
                        .into_iter()
                        .map(|(range, name)| {
                            (
                                name.to_string(),
                                self.text_for_range(range).collect::<String>(),
                            )
                        })
                        .collect();
                    // All tags should have the same range.
                    Some(RunnableRange {
                        run_range,
                        full_range,
                        runnable: Runnable {
                            tags,
                            language: mat.language,
                            buffer: self.remote_id(),
                        },
                        extra_captures,
                        buffer_id: self.remote_id(),
                    })
                });

                syntax_matches.advance();
                if test_range.is_some() {
                    // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
                    // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
                    return test_range;
                }
            }
        })
    }
4537
    /// Returns selections for remote peers intersecting the given range.
    ///
    /// Each yielded tuple is `(replica_id, line_mode, cursor_shape, selections)`,
    /// where `selections` iterates only the peer's selections intersecting
    /// `range`. When `include_local` is false, the local replica is skipped.
    /// Peers with no selections are skipped entirely.
    #[allow(clippy::type_complexity)]
    pub fn selections_in_range(
        &self,
        range: Range<Anchor>,
        include_local: bool,
    ) -> impl Iterator<
        Item = (
            ReplicaId,
            bool,
            CursorShape,
            impl Iterator<Item = &Selection<Anchor>> + '_,
        ),
    > + '_ {
        self.remote_selections
            .iter()
            .filter(move |(replica_id, set)| {
                (include_local || **replica_id != self.text.replica_id())
                    && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                // Selections are assumed sorted by position, so binary search
                // for the intersecting sub-slice. Forcing `Greater`/`Less` on
                // ties biases the searches so `start_ix` is the first selection
                // ending at/after `range.start`, and `end_ix` is one past the
                // last selection starting at/before `range.end`.
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe.start.cmp(&range.end, self).then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (
                    *replica_id,
                    set.line_mode,
                    set.cursor_shape,
                    set.selections[start_ix..end_ix].iter(),
                )
            })
    }
4578
    /// Returns if the buffer contains any diagnostics.
    ///
    /// NOTE(review): this checks that at least one per-server diagnostic set
    /// exists; it assumes empty sets are removed from `self.diagnostics`
    /// rather than stored empty — confirm against the update path.
    pub fn has_diagnostics(&self) -> bool {
        !self.diagnostics.is_empty()
    }
4583
    /// Returns all the diagnostics intersecting the given range.
    ///
    /// Entries from every language server's diagnostic set are merged into a
    /// single sequence ordered by start position (severity, then group id,
    /// break ties), reversed when `reversed` is true. Ranges are converted
    /// from anchors to `O`.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
        reversed: bool,
    ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
    where
        T: 'a + Clone + ToOffset,
        O: 'a + FromAnchor,
    {
        // One peekable cursor per language server's diagnostic set.
        let mut iterators: Vec<_> = self
            .diagnostics
            .iter()
            .map(|(_, collection)| {
                collection
                    .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
                    .peekable()
            })
            .collect();

        // K-way merge: repeatedly emit the entry that sorts first among the
        // heads of all cursors.
        std::iter::from_fn(move || {
            let (next_ix, _) = iterators
                .iter_mut()
                .enumerate()
                .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
                .min_by(|(_, a), (_, b)| {
                    let cmp = a
                        .range
                        .start
                        .cmp(&b.range.start, self)
                        // when range is equal, sort by diagnostic severity
                        .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
                        // and stabilize order with group_id
                        .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
                    if reversed { cmp.reverse() } else { cmp }
                })?;
            iterators[next_ix]
                .next()
                .map(
                    |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
                        diagnostic,
                        range: FromAnchor::from_anchor(&range.start, self)
                            ..FromAnchor::from_anchor(&range.end, self),
                    },
                )
        })
    }
4631
    /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
    /// should be used instead.
    ///
    /// Each entry pairs a diagnostic set with the language server that
    /// produced it.
    pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
        &self.diagnostics
    }
4637
4638 /// Returns all the diagnostic groups associated with the given
4639 /// language server ID. If no language server ID is provided,
4640 /// all diagnostics groups are returned.
4641 pub fn diagnostic_groups(
4642 &self,
4643 language_server_id: Option<LanguageServerId>,
4644 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4645 let mut groups = Vec::new();
4646
4647 if let Some(language_server_id) = language_server_id {
4648 if let Ok(ix) = self
4649 .diagnostics
4650 .binary_search_by_key(&language_server_id, |e| e.0)
4651 {
4652 self.diagnostics[ix]
4653 .1
4654 .groups(language_server_id, &mut groups, self);
4655 }
4656 } else {
4657 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4658 diagnostics.groups(*language_server_id, &mut groups, self);
4659 }
4660 }
4661
4662 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4663 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4664 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4665 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4666 });
4667
4668 groups
4669 }
4670
    /// Returns an iterator over the diagnostics for the given group.
    ///
    /// The group is looked up in every language server's diagnostic set;
    /// matching entries from all sets are chained together.
    pub fn diagnostic_group<O>(
        &self,
        group_id: usize,
    ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
    where
        O: FromAnchor + 'static,
    {
        self.diagnostics
            .iter()
            .flat_map(move |(_, set)| set.group(group_id, self))
    }
4683
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    ///
    /// See also [`Self::syntax_update_count`].
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
4689
    /// An integer version that changes when the buffer's syntax changes.
    ///
    /// Delegates to the syntax map's internal update counter.
    pub fn syntax_update_count(&self) -> usize {
        self.syntax.update_count()
    }
4694
    /// Returns a snapshot of underlying file.
    ///
    /// `None` when the buffer is not associated with a file.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
4699
4700 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4701 if let Some(file) = self.file() {
4702 if file.path().file_name().is_none() || include_root {
4703 Some(file.full_path(cx).to_string_lossy().into_owned())
4704 } else {
4705 Some(file.path().display(file.path_style(cx)).to_string())
4706 }
4707 } else {
4708 None
4709 }
4710 }
4711
    /// Collects the distinct words intersecting `query.range`, mapped to
    /// their anchor ranges.
    ///
    /// When `query.fuzzy_contents` is set, a word is only included if it
    /// contains all of the query's characters in order (case-insensitive).
    /// Words starting with a digit are skipped when `query.skip_digits` is
    /// set.
    ///
    /// NOTE(review): a word extending to the very end of `query.range` is
    /// never flushed (the flush only happens on a non-word character) —
    /// confirm this is intended.
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
        let query_str = query.fuzzy_contents;
        // An empty fuzzy query can never produce matches.
        if query_str.is_some_and(|query| query.is_empty()) {
            return BTreeMap::default();
        }

        // Word-character classification respects the buffer's language, if any.
        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        }));

        // How many fuzzy-query characters have matched in the current word.
        let mut query_ix = 0;
        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
        let query_len = query_chars.as_ref().map_or(0, |query| query.len());

        let mut words = BTreeMap::default();
        let mut current_word_start_ix = None;
        // Byte offset of the current chunk's start within the buffer.
        let mut chunk_ix = query.range.start;
        for chunk in self.chunks(query.range, false) {
            for (i, c) in chunk.text.char_indices() {
                let ix = chunk_ix + i;
                if classifier.is_word(c) {
                    if current_word_start_ix.is_none() {
                        current_word_start_ix = Some(ix);
                    }

                    // Advance the fuzzy cursor when this character matches the
                    // next expected query character.
                    if let Some(query_chars) = &query_chars
                        && query_ix < query_len
                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
                    {
                        query_ix += 1;
                    }
                    continue;
                } else if let Some(word_start) = current_word_start_ix.take()
                    && query_ix == query_len
                {
                    // A word just ended and the whole fuzzy query matched.
                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
                    let mut word_text = self.text_for_range(word_start..ix).peekable();
                    let first_char = word_text
                        .peek()
                        .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
                    if !query.skip_digits
                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
                    {
                        words.insert(word_text.collect(), word_range);
                    }
                }
                // Reset the fuzzy cursor at every non-word character.
                query_ix = 0;
            }
            chunk_ix += chunk.text.len();
        }

        words
    }
4767}
4768
/// Parameters for [`BufferSnapshot::words_in_range`].
pub struct WordsQuery<'a> {
    /// Only returns words with all chars from the fuzzy string in them.
    pub fuzzy_contents: Option<&'a str>,
    /// Skips words that start with a digit.
    pub skip_digits: bool,
    /// Buffer offset range, to look for words.
    pub range: Range<usize>,
}
4777
4778fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4779 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4780}
4781
4782fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4783 let mut result = IndentSize::spaces(0);
4784 for c in text {
4785 let kind = match c {
4786 ' ' => IndentKind::Space,
4787 '\t' => IndentKind::Tab,
4788 _ => break,
4789 };
4790 if result.len == 0 {
4791 result.kind = kind;
4792 }
4793 result.len += 1;
4794 }
4795 result
4796}
4797
impl Clone for BufferSnapshot {
    // NOTE(review): straightforward field-by-field clone; this could likely be
    // `#[derive(Clone)]` — verify no field intentionally prevents the derive.
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
}
4811
/// Dereferences to the underlying [`text::BufferSnapshot`], so all of its
/// read-only text APIs are available directly on [`BufferSnapshot`].
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
4819
// SAFETY: presumably sound because `BufferChunks` only borrows snapshot data
// and the non-`Send` auto-trait opt-out comes from the raw tree-sitter
// cursors it wraps — TODO(review): confirm the exact invariant; an incorrect
// `Send` impl here would be undefined behavior.
unsafe impl Send for BufferChunks<'_> {}
4821
impl<'a> BufferChunks<'a> {
    /// Creates a chunk iterator over `range` of `text`, optionally decorated
    /// with syntax highlights and diagnostic severity information.
    ///
    /// `buffer_snapshot` is needed to rebuild highlights when seeking outside
    /// the initial range and to collect diagnostic endpoints.
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostics: bool,
        buffer_snapshot: Option<&'a BufferSnapshot>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        // Placeholder; the real endpoints are computed below by
        // `initialize_diagnostic_endpoints`.
        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
        let chunks = text.chunks_in_range(range.clone());

        let mut this = BufferChunks {
            range,
            buffer_snapshot,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            underline: true,
            highlights,
        };
        this.initialize_diagnostic_endpoints();
        this
    }

    /// Seeks to the given byte offset in the buffer.
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    // The pending capture already covers the new start: push it
                    // onto the stack (if still active) instead of re-querying.
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                // Seeking outside the old range: rebuild the highlights from
                // a fresh set of captures.
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    /// Recomputes the sorted list of diagnostic start/end offsets for the
    /// current range and resets the per-severity nesting depths.
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            // Sort by offset, with starts ordered before ends at equal offsets.
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The byte range this iterator still covers.
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    /// Adjusts the nesting depth for the endpoint's severity (and the
    /// "unnecessary" flag) as iteration crosses a diagnostic boundary.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            // Other severities are not tracked.
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// The most severe diagnostic level currently containing the iterator's
    /// position, if any (ERROR > WARNING > INFORMATION > HINT).
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    /// Whether the current position lies inside a diagnostic marked as
    /// "unnecessary" code.
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}
4985
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    /// Yields the next slice of buffer text, cut so that no syntax-highlight
    /// capture boundary or diagnostic endpoint falls inside a single chunk.
    fn next(&mut self) -> Option<Self::Item> {
        // Offsets that would force the upcoming chunk to be cut short: the
        // start of the next highlight capture and the next diagnostic edge.
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop highlight scopes that ended at or before the current offset.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the scope stack;
            // stop at the first capture starting beyond the current offset.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Temporarily move the endpoint iterator out of `self` so the depth
        // counters can be updated (`&mut self`) while it is being consumed.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            // Consume endpoints at or before the current offset, then record
            // where the next one begins so the chunk can be clipped there.
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            // Clip the chunk at the earliest of: end of the underlying text
            // chunk, the next capture start, and the next diagnostic edge.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                // Also clip at the end of the innermost highlight scope; its
                // highlight id applies to this chunk.
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            // Slice bounds relative to the start of the underlying chunk,
            // used for both the text and its per-byte bitmaps.
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Shift the bitmaps down so bit 0 is the slice's first byte.
            // NOTE(review): the mask keeps `bit_end` bits *after* the shift,
            // i.e. original bits up to `bit_start + bit_end`, so when
            // `bit_start > 0` some bits past the slice's length survive —
            // confirm consumers ignore bits beyond `slice.len()`.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            // Advance past the slice; drop the underlying chunk once fully
            // consumed.
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
5082
5083impl operation_queue::Operation for Operation {
5084 fn lamport_timestamp(&self) -> clock::Lamport {
5085 match self {
5086 Operation::Buffer(_) => {
5087 unreachable!("buffer operations should never be deferred at this layer")
5088 }
5089 Operation::UpdateDiagnostics {
5090 lamport_timestamp, ..
5091 }
5092 | Operation::UpdateSelections {
5093 lamport_timestamp, ..
5094 }
5095 | Operation::UpdateCompletionTriggers {
5096 lamport_timestamp, ..
5097 }
5098 | Operation::UpdateLineEnding {
5099 lamport_timestamp, ..
5100 } => *lamport_timestamp,
5101 }
5102 }
5103}
5104
5105impl Default for Diagnostic {
5106 fn default() -> Self {
5107 Self {
5108 source: Default::default(),
5109 source_kind: DiagnosticSourceKind::Other,
5110 code: None,
5111 code_description: None,
5112 severity: DiagnosticSeverity::ERROR,
5113 message: Default::default(),
5114 markdown: None,
5115 group_id: 0,
5116 is_primary: false,
5117 is_disk_based: false,
5118 is_unnecessary: false,
5119 underline: true,
5120 data: None,
5121 }
5122 }
5123}
5124
5125impl IndentSize {
5126 /// Returns an [`IndentSize`] representing the given spaces.
5127 pub fn spaces(len: u32) -> Self {
5128 Self {
5129 len,
5130 kind: IndentKind::Space,
5131 }
5132 }
5133
5134 /// Returns an [`IndentSize`] representing a tab.
5135 pub fn tab() -> Self {
5136 Self {
5137 len: 1,
5138 kind: IndentKind::Tab,
5139 }
5140 }
5141
5142 /// An iterator over the characters represented by this [`IndentSize`].
5143 pub fn chars(&self) -> impl Iterator<Item = char> {
5144 iter::repeat(self.char()).take(self.len as usize)
5145 }
5146
5147 /// The character representation of this [`IndentSize`].
5148 pub fn char(&self) -> char {
5149 match self.kind {
5150 IndentKind::Space => ' ',
5151 IndentKind::Tab => '\t',
5152 }
5153 }
5154
5155 /// Consumes the current [`IndentSize`] and returns a new one that has
5156 /// been shrunk or enlarged by the given size along the given direction.
5157 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5158 match direction {
5159 Ordering::Less => {
5160 if self.kind == size.kind && self.len >= size.len {
5161 self.len -= size.len;
5162 }
5163 }
5164 Ordering::Equal => {}
5165 Ordering::Greater => {
5166 if self.len == 0 {
5167 self = size;
5168 } else if self.kind == size.kind {
5169 self.len += size.len;
5170 }
5171 }
5172 }
5173 self
5174 }
5175
5176 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5177 match self.kind {
5178 IndentKind::Space => self.len as usize,
5179 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5180 }
5181 }
5182}
5183
#[cfg(any(test, feature = "test-support"))]
/// A minimal [`File`] implementation for tests; many trait methods are
/// intentionally left `unimplemented!()`.
pub struct TestFile {
    /// Path of the file, relative to the worktree root.
    pub path: Arc<RelPath>,
    /// Name prepended to `path` when computing the file's full path.
    pub root_name: String,
    /// Base directory for `abs_path`; when `Some`, the file reports itself
    /// as a local file via `as_local`.
    pub local_root: Option<PathBuf>,
}
5190
#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    // Full path is the root name followed by the worktree-relative path.
    fn full_path(&self, _: &gpui::App) -> PathBuf {
        let mut full_path = PathBuf::from(self.root_name.clone());
        full_path.push(self.path.as_std_path());
        full_path
    }

    // The test file only acts as a local file when a local root was given.
    fn as_local(&self) -> Option<&dyn LocalFile> {
        self.local_root.is_some().then_some(self as &dyn LocalFile)
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    // Falls back to the root name for paths without a file name.
    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        match self.path().file_name() {
            Some(file_name) => file_name,
            None => self.root_name.as_ref(),
        }
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}
5233
#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    // Absolute path: local_root / root_name / relative path.
    // Panics if `local_root` is `None`.
    fn abs_path(&self, _cx: &App) -> PathBuf {
        let mut abs_path = PathBuf::from(self.local_root.as_ref().unwrap());
        abs_path.push(&self.root_name);
        abs_path.push(self.path.as_std_path());
        abs_path
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }

    fn load_with_encoding(
        &self,
        _: &App,
        _: &'static Encoding,
        _: bool, // whether to force the encoding even if a BOM is present
        _: Arc<std::sync::Mutex<&'static Encoding>>,
    ) -> Task<Result<String>> {
        unimplemented!()
    }
}
5260
5261pub(crate) fn contiguous_ranges(
5262 values: impl Iterator<Item = u32>,
5263 max_len: usize,
5264) -> impl Iterator<Item = Range<u32>> {
5265 let mut values = values;
5266 let mut current_range: Option<Range<u32>> = None;
5267 std::iter::from_fn(move || {
5268 loop {
5269 if let Some(value) = values.next() {
5270 if let Some(range) = &mut current_range
5271 && value == range.end
5272 && range.len() < max_len
5273 {
5274 range.end += 1;
5275 continue;
5276 }
5277
5278 let prev_range = current_range.clone();
5279 current_range = Some(value..(value + 1));
5280 if prev_range.is_some() {
5281 return prev_range;
5282 }
5283 } else {
5284 return current_range.take();
5285 }
5286 }
5287 })
5288}
5289
#[derive(Default, Debug)]
/// Classifies characters as word, whitespace, or punctuation, optionally
/// using language-specific word-character sets.
pub struct CharClassifier {
    // Language scope consulted for extra word characters; `None` means only
    // alphanumerics and '_' count as word characters.
    scope: Option<LanguageScope>,
    // Selects which of the scope's character sets applies (completion
    // queries, linked edits, or plain word boundaries when `None`).
    scope_context: Option<CharScopeContext>,
    // When true, punctuation characters are classified as words.
    ignore_punctuation: bool,
}
5296
5297impl CharClassifier {
5298 pub fn new(scope: Option<LanguageScope>) -> Self {
5299 Self {
5300 scope,
5301 scope_context: None,
5302 ignore_punctuation: false,
5303 }
5304 }
5305
5306 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5307 Self {
5308 scope_context,
5309 ..self
5310 }
5311 }
5312
5313 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5314 Self {
5315 ignore_punctuation,
5316 ..self
5317 }
5318 }
5319
5320 pub fn is_whitespace(&self, c: char) -> bool {
5321 self.kind(c) == CharKind::Whitespace
5322 }
5323
5324 pub fn is_word(&self, c: char) -> bool {
5325 self.kind(c) == CharKind::Word
5326 }
5327
5328 pub fn is_punctuation(&self, c: char) -> bool {
5329 self.kind(c) == CharKind::Punctuation
5330 }
5331
5332 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5333 if c.is_alphanumeric() || c == '_' {
5334 return CharKind::Word;
5335 }
5336
5337 if let Some(scope) = &self.scope {
5338 let characters = match self.scope_context {
5339 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5340 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5341 None => scope.word_characters(),
5342 };
5343 if let Some(characters) = characters
5344 && characters.contains(&c)
5345 {
5346 return CharKind::Word;
5347 }
5348 }
5349
5350 if c.is_whitespace() {
5351 return CharKind::Whitespace;
5352 }
5353
5354 if ignore_punctuation {
5355 CharKind::Word
5356 } else {
5357 CharKind::Punctuation
5358 }
5359 }
5360
5361 pub fn kind(&self, c: char) -> CharKind {
5362 self.kind_with(c, self.ignore_punctuation)
5363 }
5364}
5365
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    // Byte offset of the start of the current line segment within the rope.
    let mut offset = 0;
    // Trailing-whitespace range of the last (possibly partial) line of the
    // previous chunk, so whitespace spanning a chunk boundary is merged.
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            // Only spaces and tabs count as trailing whitespace here.
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first segment of a chunk continues the previous chunk's
            // last line; if it is entirely whitespace, extend the range
            // carried over from that chunk.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            // A line's range is only emitted once the '\n' that follows it
            // has been seen, confirming the line actually ended.
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            // Advance past this segment plus the '\n' that terminated it.
            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The chunk's final segment had no trailing '\n'; undo the extra
        // newline advance so the next chunk continues the same line.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    // Flush whitespace at the very end of the rope (no trailing '\n').
    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}