1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
28 SharedString, StyledText, Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
78/// Indicate whether a [`Buffer`] has permissions to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
124 /// Memoize calls to has_changes_since(saved_version).
125 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
219 /// The human-readable message (in markdown format)
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
241 /// Quick separation of diagnostics groups based by their source.
242 pub source_kind: DiagnosticSourceKind,
243 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
323 /// The buffer is in need of a reload
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
372 /// Return whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
411 /// Returns the absolute path of this file
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
438 /// If the original indent column is `a`, and the first line of insertion
439 /// is then auto-indented to column `b`, then every other line of
440 /// the insertion will be auto-indented to column `b - a`
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
509 /// A bitset of which characters are tabs in this string.
510 pub tabs: u128,
511 /// Bitmap of character indices in this chunk
512 pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
568/// A runnable is a set of data about a region that could be resolved into a task
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
611 /// Returns the first line without leading whitespace unless highlighted
612 /// and a boolean indicating if there are more lines after
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, String)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
751
752 let unchanged_range_in_preview_snapshot =
753 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
754 if !unchanged_range_in_preview_snapshot.is_empty() {
755 highlighted_text.add_text_from_buffer_range(
756 unchanged_range_in_preview_snapshot,
757 &self.applied_edits_snapshot,
758 &self.syntax_snapshot,
759 None,
760 syntax_theme,
761 );
762 }
763
764 let range_in_current_snapshot = range.to_offset(current_snapshot);
765 if include_deletions && !range_in_current_snapshot.is_empty() {
766 highlighted_text.add_text_from_buffer_range(
767 range_in_current_snapshot,
768 ¤t_snapshot.text,
769 ¤t_snapshot.syntax,
770 Some(deletion_highlight_style),
771 syntax_theme,
772 );
773 }
774
775 if !edit_text.is_empty() {
776 highlighted_text.add_text_from_buffer_range(
777 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
778 &self.applied_edits_snapshot,
779 &self.syntax_snapshot,
780 Some(insertion_highlight_style),
781 syntax_theme,
782 );
783 }
784
785 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
786 }
787
788 highlighted_text.add_text_from_buffer_range(
789 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
790 &self.applied_edits_snapshot,
791 &self.syntax_snapshot,
792 None,
793 syntax_theme,
794 );
795
796 highlighted_text.build()
797 }
798
799 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
800 let (first, _) = edits.first()?;
801 let (last, _) = edits.last()?;
802
803 let start = first
804 .start
805 .bias_left(&self.old_snapshot)
806 .to_point(&self.applied_edits_snapshot);
807 let end = last
808 .end
809 .bias_right(&self.old_snapshot)
810 .to_point(&self.applied_edits_snapshot);
811
812 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
813 let range = Point::new(start.row, 0)
814 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
815
816 Some(range.to_offset(&self.applied_edits_snapshot))
817 }
818}
819
820#[derive(Clone, Debug, PartialEq, Eq)]
821pub struct BracketMatch {
822 pub open_range: Range<usize>,
823 pub close_range: Range<usize>,
824 pub newline_only: bool,
825}
826
827impl Buffer {
828 /// Create a new buffer with the given base text.
829 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
830 Self::build(
831 TextBuffer::new(
832 ReplicaId::LOCAL,
833 cx.entity_id().as_non_zero_u64().into(),
834 base_text.into(),
835 &cx.background_executor(),
836 ),
837 None,
838 Capability::ReadWrite,
839 )
840 }
841
842 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
843 pub fn local_normalized(
844 base_text_normalized: Rope,
845 line_ending: LineEnding,
846 cx: &Context<Self>,
847 ) -> Self {
848 Self::build(
849 TextBuffer::new_normalized(
850 ReplicaId::LOCAL,
851 cx.entity_id().as_non_zero_u64().into(),
852 line_ending,
853 base_text_normalized,
854 ),
855 None,
856 Capability::ReadWrite,
857 )
858 }
859
860 /// Create a new buffer that is a replica of a remote buffer.
861 pub fn remote(
862 remote_id: BufferId,
863 replica_id: ReplicaId,
864 capability: Capability,
865 base_text: impl Into<String>,
866 cx: &BackgroundExecutor,
867 ) -> Self {
868 Self::build(
869 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
870 None,
871 capability,
872 )
873 }
874
875 /// Create a new buffer that is a replica of a remote buffer, populating its
876 /// state from the given protobuf message.
877 pub fn from_proto(
878 replica_id: ReplicaId,
879 capability: Capability,
880 message: proto::BufferState,
881 file: Option<Arc<dyn File>>,
882 cx: &BackgroundExecutor,
883 ) -> Result<Self> {
884 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
885 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
886 let mut this = Self::build(buffer, file, capability);
887 this.text.set_line_ending(proto::deserialize_line_ending(
888 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
889 ));
890 this.saved_version = proto::deserialize_version(&message.saved_version);
891 this.saved_mtime = message.saved_mtime.map(|time| time.into());
892 Ok(this)
893 }
894
895 /// Serialize the buffer's state to a protobuf message.
896 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
897 proto::BufferState {
898 id: self.remote_id().into(),
899 file: self.file.as_ref().map(|f| f.to_proto(cx)),
900 base_text: self.base_text().to_string(),
901 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
902 saved_version: proto::serialize_version(&self.saved_version),
903 saved_mtime: self.saved_mtime.map(|time| time.into()),
904 }
905 }
906
907 /// Serialize as protobufs all of the changes to the buffer since the given version.
908 pub fn serialize_ops(
909 &self,
910 since: Option<clock::Global>,
911 cx: &App,
912 ) -> Task<Vec<proto::Operation>> {
913 let mut operations = Vec::new();
914 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
915
916 operations.extend(self.remote_selections.iter().map(|(_, set)| {
917 proto::serialize_operation(&Operation::UpdateSelections {
918 selections: set.selections.clone(),
919 lamport_timestamp: set.lamport_timestamp,
920 line_mode: set.line_mode,
921 cursor_shape: set.cursor_shape,
922 })
923 }));
924
925 for (server_id, diagnostics) in &self.diagnostics {
926 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
927 lamport_timestamp: self.diagnostics_timestamp,
928 server_id: *server_id,
929 diagnostics: diagnostics.iter().cloned().collect(),
930 }));
931 }
932
933 for (server_id, completions) in &self.completion_triggers_per_language_server {
934 operations.push(proto::serialize_operation(
935 &Operation::UpdateCompletionTriggers {
936 triggers: completions.iter().cloned().collect(),
937 lamport_timestamp: self.completion_triggers_timestamp,
938 server_id: *server_id,
939 },
940 ));
941 }
942
943 let text_operations = self.text.operations().clone();
944 cx.background_spawn(async move {
945 let since = since.unwrap_or_default();
946 operations.extend(
947 text_operations
948 .iter()
949 .filter(|(_, op)| !since.observed(op.timestamp()))
950 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
951 );
952 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
953 operations
954 })
955 }
956
957 /// Assign a language to the buffer, returning the buffer.
958 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
959 self.set_language(Some(language), cx);
960 self
961 }
962
963 /// Returns the [`Capability`] of this buffer.
964 pub fn capability(&self) -> Capability {
965 self.capability
966 }
967
968 /// Whether this buffer can only be read.
969 pub fn read_only(&self) -> bool {
970 self.capability == Capability::ReadOnly
971 }
972
973 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
974 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
975 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
976 let snapshot = buffer.snapshot();
977 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
978 Self {
979 saved_mtime,
980 saved_version: buffer.version(),
981 preview_version: buffer.version(),
982 reload_task: None,
983 transaction_depth: 0,
984 was_dirty_before_starting_transaction: None,
985 has_unsaved_edits: Cell::new((buffer.version(), false)),
986 text: buffer,
987 branch_state: None,
988 file,
989 capability,
990 syntax_map,
991 reparse: None,
992 non_text_state_update_count: 0,
993 sync_parse_timeout: Duration::from_millis(1),
994 parse_status: watch::channel(ParseStatus::Idle),
995 autoindent_requests: Default::default(),
996 wait_for_autoindent_txs: Default::default(),
997 pending_autoindent: Default::default(),
998 language: None,
999 remote_selections: Default::default(),
1000 diagnostics: Default::default(),
1001 diagnostics_timestamp: Lamport::MIN,
1002 completion_triggers: Default::default(),
1003 completion_triggers_per_language_server: Default::default(),
1004 completion_triggers_timestamp: Lamport::MIN,
1005 deferred_ops: OperationQueue::new(),
1006 has_conflict: false,
1007 change_bits: Default::default(),
1008 _subscriptions: Vec::new(),
1009 }
1010 }
1011
1012 pub fn build_snapshot(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> impl Future<Output = BufferSnapshot> + use<> {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 async move {
1021 let text =
1022 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1023 .snapshot();
1024 let mut syntax = SyntaxMap::new(&text).snapshot();
1025 if let Some(language) = language.clone() {
1026 let language_registry = language_registry.clone();
1027 syntax.reparse(&text, language_registry, language);
1028 }
1029 BufferSnapshot {
1030 text,
1031 syntax,
1032 file: None,
1033 diagnostics: Default::default(),
1034 remote_selections: Default::default(),
1035 language,
1036 non_text_state_update_count: 0,
1037 }
1038 }
1039 }
1040
1041 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1042 let entity_id = cx.reserve_entity::<Self>().entity_id();
1043 let buffer_id = entity_id.as_non_zero_u64().into();
1044 let text = TextBuffer::new_normalized(
1045 ReplicaId::LOCAL,
1046 buffer_id,
1047 Default::default(),
1048 Rope::new(),
1049 )
1050 .snapshot();
1051 let syntax = SyntaxMap::new(&text).snapshot();
1052 BufferSnapshot {
1053 text,
1054 syntax,
1055 file: None,
1056 diagnostics: Default::default(),
1057 remote_selections: Default::default(),
1058 language: None,
1059 non_text_state_update_count: 0,
1060 }
1061 }
1062
1063 #[cfg(any(test, feature = "test-support"))]
1064 pub fn build_snapshot_sync(
1065 text: Rope,
1066 language: Option<Arc<Language>>,
1067 language_registry: Option<Arc<LanguageRegistry>>,
1068 cx: &mut App,
1069 ) -> BufferSnapshot {
1070 let entity_id = cx.reserve_entity::<Self>().entity_id();
1071 let buffer_id = entity_id.as_non_zero_u64().into();
1072 let text =
1073 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1074 .snapshot();
1075 let mut syntax = SyntaxMap::new(&text).snapshot();
1076 if let Some(language) = language.clone() {
1077 syntax.reparse(&text, language_registry, language);
1078 }
1079 BufferSnapshot {
1080 text,
1081 syntax,
1082 file: None,
1083 diagnostics: Default::default(),
1084 remote_selections: Default::default(),
1085 language,
1086 non_text_state_update_count: 0,
1087 }
1088 }
1089
1090 /// Retrieve a snapshot of the buffer's current state. This is computationally
1091 /// cheap, and allows reading from the buffer on a background thread.
1092 pub fn snapshot(&self) -> BufferSnapshot {
1093 let text = self.text.snapshot();
1094 let mut syntax_map = self.syntax_map.lock();
1095 syntax_map.interpolate(&text);
1096 let syntax = syntax_map.snapshot();
1097
1098 BufferSnapshot {
1099 text,
1100 syntax,
1101 file: self.file.clone(),
1102 remote_selections: self.remote_selections.clone(),
1103 diagnostics: self.diagnostics.clone(),
1104 language: self.language.clone(),
1105 non_text_state_update_count: self.non_text_state_update_count,
1106 }
1107 }
1108
1109 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1110 let this = cx.entity();
1111 cx.new(|cx| {
1112 let mut branch = Self {
1113 branch_state: Some(BufferBranchState {
1114 base_buffer: this.clone(),
1115 merged_operations: Default::default(),
1116 }),
1117 language: self.language.clone(),
1118 has_conflict: self.has_conflict,
1119 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1120 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1121 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1122 };
1123 if let Some(language_registry) = self.language_registry() {
1124 branch.set_language_registry(language_registry);
1125 }
1126
1127 // Reparse the branch buffer so that we get syntax highlighting immediately.
1128 branch.reparse(cx);
1129
1130 branch
1131 })
1132 }
1133
1134 pub fn preview_edits(
1135 &self,
1136 edits: Arc<[(Range<Anchor>, String)]>,
1137 cx: &App,
1138 ) -> Task<EditPreview> {
1139 let registry = self.language_registry();
1140 let language = self.language().cloned();
1141 let old_snapshot = self.text.snapshot();
1142 let mut branch_buffer = self.text.branch();
1143 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1144 let executor = cx.background_executor().clone();
1145 cx.background_spawn(async move {
1146 if !edits.is_empty() {
1147 if let Some(language) = language.clone() {
1148 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1149 }
1150
1151 branch_buffer.edit(edits.iter().cloned(), &executor);
1152 let snapshot = branch_buffer.snapshot();
1153 syntax_snapshot.interpolate(&snapshot);
1154
1155 if let Some(language) = language {
1156 syntax_snapshot.reparse(&snapshot, registry, language);
1157 }
1158 }
1159 EditPreview {
1160 old_snapshot,
1161 applied_edits_snapshot: branch_buffer.snapshot(),
1162 syntax_snapshot,
1163 }
1164 })
1165 }
1166
1167 /// Applies all of the changes in this buffer that intersect any of the
1168 /// given `ranges` to its base buffer.
1169 ///
1170 /// If `ranges` is empty, then all changes will be applied. This buffer must
1171 /// be a branch buffer to call this method.
1172 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1173 let Some(base_buffer) = self.base_buffer() else {
1174 debug_panic!("not a branch buffer");
1175 return;
1176 };
1177
1178 let mut ranges = if ranges.is_empty() {
1179 &[0..usize::MAX]
1180 } else {
1181 ranges.as_slice()
1182 }
1183 .iter()
1184 .peekable();
1185
1186 let mut edits = Vec::new();
1187 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1188 let mut is_included = false;
1189 while let Some(range) = ranges.peek() {
1190 if range.end < edit.new.start {
1191 ranges.next().unwrap();
1192 } else {
1193 if range.start <= edit.new.end {
1194 is_included = true;
1195 }
1196 break;
1197 }
1198 }
1199
1200 if is_included {
1201 edits.push((
1202 edit.old.clone(),
1203 self.text_for_range(edit.new.clone()).collect::<String>(),
1204 ));
1205 }
1206 }
1207
1208 let operation = base_buffer.update(cx, |base_buffer, cx| {
1209 // cx.emit(BufferEvent::DiffBaseChanged);
1210 base_buffer.edit(edits, None, cx)
1211 });
1212
1213 if let Some(operation) = operation
1214 && let Some(BufferBranchState {
1215 merged_operations, ..
1216 }) = &mut self.branch_state
1217 {
1218 merged_operations.push(operation);
1219 }
1220 }
1221
1222 fn on_base_buffer_event(
1223 &mut self,
1224 _: Entity<Buffer>,
1225 event: &BufferEvent,
1226 cx: &mut Context<Self>,
1227 ) {
1228 let BufferEvent::Operation { operation, .. } = event else {
1229 return;
1230 };
1231 let Some(BufferBranchState {
1232 merged_operations, ..
1233 }) = &mut self.branch_state
1234 else {
1235 return;
1236 };
1237
1238 let mut operation_to_undo = None;
1239 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1240 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1241 {
1242 merged_operations.remove(ix);
1243 operation_to_undo = Some(operation.timestamp);
1244 }
1245
1246 self.apply_ops([operation.clone()], cx);
1247
1248 if let Some(timestamp) = operation_to_undo {
1249 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1250 self.undo_operations(counts, cx);
1251 }
1252 }
1253
1254 #[cfg(test)]
1255 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1256 &self.text
1257 }
1258
1259 /// Retrieve a snapshot of the buffer's raw text, without any
1260 /// language-related state like the syntax tree or diagnostics.
1261 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1262 self.text.snapshot()
1263 }
1264
1265 /// The file associated with the buffer, if any.
1266 pub fn file(&self) -> Option<&Arc<dyn File>> {
1267 self.file.as_ref()
1268 }
1269
1270 /// The version of the buffer that was last saved or reloaded from disk.
1271 pub fn saved_version(&self) -> &clock::Global {
1272 &self.saved_version
1273 }
1274
1275 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1276 pub fn saved_mtime(&self) -> Option<MTime> {
1277 self.saved_mtime
1278 }
1279
1280 /// Assign a language to the buffer.
1281 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1282 self.non_text_state_update_count += 1;
1283 self.syntax_map.lock().clear(&self.text);
1284 self.language = language;
1285 self.was_changed();
1286 self.reparse(cx);
1287 cx.emit(BufferEvent::LanguageChanged);
1288 }
1289
1290 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1291 /// other languages if parts of the buffer are written in different languages.
1292 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1293 self.syntax_map
1294 .lock()
1295 .set_language_registry(language_registry);
1296 }
1297
1298 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1299 self.syntax_map.lock().language_registry()
1300 }
1301
1302 /// Assign the line ending type to the buffer.
1303 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1304 self.text.set_line_ending(line_ending);
1305
1306 let lamport_timestamp = self.text.lamport_clock.tick();
1307 self.send_operation(
1308 Operation::UpdateLineEnding {
1309 line_ending,
1310 lamport_timestamp,
1311 },
1312 true,
1313 cx,
1314 );
1315 }
1316
1317 /// Assign the buffer a new [`Capability`].
1318 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1319 if self.capability != capability {
1320 self.capability = capability;
1321 cx.emit(BufferEvent::CapabilityChanged)
1322 }
1323 }
1324
1325 /// This method is called to signal that the buffer has been saved.
1326 pub fn did_save(
1327 &mut self,
1328 version: clock::Global,
1329 mtime: Option<MTime>,
1330 cx: &mut Context<Self>,
1331 ) {
1332 self.saved_version = version.clone();
1333 self.has_unsaved_edits.set((version, false));
1334 self.has_conflict = false;
1335 self.saved_mtime = mtime;
1336 self.was_changed();
1337 cx.emit(BufferEvent::Saved);
1338 cx.notify();
1339 }
1340
1341 /// Reloads the contents of the buffer from disk.
1342 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1343 let (tx, rx) = futures::channel::oneshot::channel();
1344 let prev_version = self.text.version();
1345 self.reload_task = Some(cx.spawn(async move |this, cx| {
1346 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1347 let file = this.file.as_ref()?.as_local()?;
1348
1349 Some((file.disk_state().mtime(), file.load(cx)))
1350 })?
1351 else {
1352 return Ok(());
1353 };
1354
1355 let new_text = new_text.await?;
1356 let diff = this
1357 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1358 .await;
1359 this.update(cx, |this, cx| {
1360 if this.version() == diff.base_version {
1361 this.finalize_last_transaction();
1362 this.apply_diff(diff, cx);
1363 tx.send(this.finalize_last_transaction().cloned()).ok();
1364 this.has_conflict = false;
1365 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1366 } else {
1367 if !diff.edits.is_empty()
1368 || this
1369 .edits_since::<usize>(&diff.base_version)
1370 .next()
1371 .is_some()
1372 {
1373 this.has_conflict = true;
1374 }
1375
1376 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1377 }
1378
1379 this.reload_task.take();
1380 })
1381 }));
1382 rx
1383 }
1384
1385 /// This method is called to signal that the buffer has been reloaded.
1386 pub fn did_reload(
1387 &mut self,
1388 version: clock::Global,
1389 line_ending: LineEnding,
1390 mtime: Option<MTime>,
1391 cx: &mut Context<Self>,
1392 ) {
1393 self.saved_version = version;
1394 self.has_unsaved_edits
1395 .set((self.saved_version.clone(), false));
1396 self.text.set_line_ending(line_ending);
1397 self.saved_mtime = mtime;
1398 cx.emit(BufferEvent::Reloaded);
1399 cx.notify();
1400 }
1401
1402 /// Updates the [`File`] backing this buffer. This should be called when
1403 /// the file has changed or has been deleted.
1404 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1405 let was_dirty = self.is_dirty();
1406 let mut file_changed = false;
1407
1408 if let Some(old_file) = self.file.as_ref() {
1409 if new_file.path() != old_file.path() {
1410 file_changed = true;
1411 }
1412
1413 let old_state = old_file.disk_state();
1414 let new_state = new_file.disk_state();
1415 if old_state != new_state {
1416 file_changed = true;
1417 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1418 cx.emit(BufferEvent::ReloadNeeded)
1419 }
1420 }
1421 } else {
1422 file_changed = true;
1423 };
1424
1425 self.file = Some(new_file);
1426 if file_changed {
1427 self.was_changed();
1428 self.non_text_state_update_count += 1;
1429 if was_dirty != self.is_dirty() {
1430 cx.emit(BufferEvent::DirtyChanged);
1431 }
1432 cx.emit(BufferEvent::FileHandleChanged);
1433 cx.notify();
1434 }
1435 }
1436
1437 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1438 Some(self.branch_state.as_ref()?.base_buffer.clone())
1439 }
1440
1441 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1442 pub fn language(&self) -> Option<&Arc<Language>> {
1443 self.language.as_ref()
1444 }
1445
1446 /// Returns the [`Language`] at the given location.
1447 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1448 let offset = position.to_offset(self);
1449 let mut is_first = true;
1450 let start_anchor = self.anchor_before(offset);
1451 let end_anchor = self.anchor_after(offset);
1452 self.syntax_map
1453 .lock()
1454 .layers_for_range(offset..offset, &self.text, false)
1455 .filter(|layer| {
1456 if is_first {
1457 is_first = false;
1458 return true;
1459 }
1460
1461 layer
1462 .included_sub_ranges
1463 .map(|sub_ranges| {
1464 sub_ranges.iter().any(|sub_range| {
1465 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1466 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1467 !is_before_start && !is_after_end
1468 })
1469 })
1470 .unwrap_or(true)
1471 })
1472 .last()
1473 .map(|info| info.language.clone())
1474 .or_else(|| self.language.clone())
1475 }
1476
1477 /// Returns each [`Language`] for the active syntax layers at the given location.
1478 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1479 let offset = position.to_offset(self);
1480 let mut languages: Vec<Arc<Language>> = self
1481 .syntax_map
1482 .lock()
1483 .layers_for_range(offset..offset, &self.text, false)
1484 .map(|info| info.language.clone())
1485 .collect();
1486
1487 if languages.is_empty()
1488 && let Some(buffer_language) = self.language()
1489 {
1490 languages.push(buffer_language.clone());
1491 }
1492
1493 languages
1494 }
1495
1496 /// An integer version number that accounts for all updates besides
1497 /// the buffer's text itself (which is versioned via a version vector).
1498 pub fn non_text_state_update_count(&self) -> usize {
1499 self.non_text_state_update_count
1500 }
1501
1502 /// Whether the buffer is being parsed in the background.
1503 #[cfg(any(test, feature = "test-support"))]
1504 pub fn is_parsing(&self) -> bool {
1505 self.reparse.is_some()
1506 }
1507
1508 /// Indicates whether the buffer contains any regions that may be
1509 /// written in a language that hasn't been loaded yet.
1510 pub fn contains_unknown_injections(&self) -> bool {
1511 self.syntax_map.lock().contains_unknown_injections()
1512 }
1513
1514 #[cfg(any(test, feature = "test-support"))]
1515 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1516 self.sync_parse_timeout = timeout;
1517 }
1518
1519 /// Called after an edit to synchronize the buffer's main parse tree with
1520 /// the buffer's new underlying state.
1521 ///
1522 /// Locks the syntax map and interpolates the edits since the last reparse
1523 /// into the foreground syntax tree.
1524 ///
1525 /// Then takes a stable snapshot of the syntax map before unlocking it.
1526 /// The snapshot with the interpolated edits is sent to a background thread,
1527 /// where we ask Tree-sitter to perform an incremental parse.
1528 ///
1529 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1530 /// waiting on the parse to complete. As soon as it completes, we proceed
1531 /// synchronously, unless a 1ms timeout elapses.
1532 ///
1533 /// If we time out waiting on the parse, we spawn a second task waiting
1534 /// until the parse does complete and return with the interpolated tree still
1535 /// in the foreground. When the background parse completes, call back into
1536 /// the main thread and assign the foreground parse state.
1537 ///
1538 /// If the buffer or grammar changed since the start of the background parse,
1539 /// initiate an additional reparse recursively. To avoid concurrent parses
1540 /// for the same buffer, we only initiate a new parse if we are not already
1541 /// parsing in the background.
1542 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1543 if self.reparse.is_some() {
1544 return;
1545 }
1546 let language = if let Some(language) = self.language.clone() {
1547 language
1548 } else {
1549 return;
1550 };
1551
1552 let text = self.text_snapshot();
1553 let parsed_version = self.version();
1554
1555 let mut syntax_map = self.syntax_map.lock();
1556 syntax_map.interpolate(&text);
1557 let language_registry = syntax_map.language_registry();
1558 let mut syntax_snapshot = syntax_map.snapshot();
1559 drop(syntax_map);
1560
1561 let parse_task = cx.background_spawn({
1562 let language = language.clone();
1563 let language_registry = language_registry.clone();
1564 async move {
1565 syntax_snapshot.reparse(&text, language_registry, language);
1566 syntax_snapshot
1567 }
1568 });
1569
1570 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1571 match cx
1572 .background_executor()
1573 .block_with_timeout(self.sync_parse_timeout, parse_task)
1574 {
1575 Ok(new_syntax_snapshot) => {
1576 self.did_finish_parsing(new_syntax_snapshot, cx);
1577 self.reparse = None;
1578 }
1579 Err(parse_task) => {
1580 self.reparse = Some(cx.spawn(async move |this, cx| {
1581 let new_syntax_map = parse_task.await;
1582 this.update(cx, move |this, cx| {
1583 let grammar_changed =
1584 this.language.as_ref().is_none_or(|current_language| {
1585 !Arc::ptr_eq(&language, current_language)
1586 });
1587 let language_registry_changed = new_syntax_map
1588 .contains_unknown_injections()
1589 && language_registry.is_some_and(|registry| {
1590 registry.version() != new_syntax_map.language_registry_version()
1591 });
1592 let parse_again = language_registry_changed
1593 || grammar_changed
1594 || this.version.changed_since(&parsed_version);
1595 this.did_finish_parsing(new_syntax_map, cx);
1596 this.reparse = None;
1597 if parse_again {
1598 this.reparse(cx);
1599 }
1600 })
1601 .ok();
1602 }));
1603 }
1604 }
1605 }
1606
1607 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1608 self.was_changed();
1609 self.non_text_state_update_count += 1;
1610 self.syntax_map.lock().did_parse(syntax_snapshot);
1611 self.request_autoindent(cx);
1612 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1613 cx.emit(BufferEvent::Reparsed);
1614 cx.notify();
1615 }
1616
1617 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1618 self.parse_status.1.clone()
1619 }
1620
1621 /// Assign to the buffer a set of diagnostics created by a given language server.
1622 pub fn update_diagnostics(
1623 &mut self,
1624 server_id: LanguageServerId,
1625 diagnostics: DiagnosticSet,
1626 cx: &mut Context<Self>,
1627 ) {
1628 let lamport_timestamp = self.text.lamport_clock.tick();
1629 let op = Operation::UpdateDiagnostics {
1630 server_id,
1631 diagnostics: diagnostics.iter().cloned().collect(),
1632 lamport_timestamp,
1633 };
1634
1635 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1636 self.send_operation(op, true, cx);
1637 }
1638
1639 pub fn buffer_diagnostics(
1640 &self,
1641 for_server: Option<LanguageServerId>,
1642 ) -> Vec<&DiagnosticEntry<Anchor>> {
1643 match for_server {
1644 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1645 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1646 Err(_) => Vec::new(),
1647 },
1648 None => self
1649 .diagnostics
1650 .iter()
1651 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1652 .collect(),
1653 }
1654 }
1655
1656 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1657 if let Some(indent_sizes) = self.compute_autoindents() {
1658 let indent_sizes = cx.background_spawn(indent_sizes);
1659 match cx
1660 .background_executor()
1661 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1662 {
1663 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1664 Err(indent_sizes) => {
1665 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1666 let indent_sizes = indent_sizes.await;
1667 this.update(cx, |this, cx| {
1668 this.apply_autoindents(indent_sizes, cx);
1669 })
1670 .ok();
1671 }));
1672 }
1673 }
1674 } else {
1675 self.autoindent_requests.clear();
1676 for tx in self.wait_for_autoindent_txs.drain(..) {
1677 tx.send(()).ok();
1678 }
1679 }
1680 }
1681
1682 fn compute_autoindents(
1683 &self,
1684 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1685 let max_rows_between_yields = 100;
1686 let snapshot = self.snapshot();
1687 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1688 return None;
1689 }
1690
1691 let autoindent_requests = self.autoindent_requests.clone();
1692 Some(async move {
1693 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1694 for request in autoindent_requests {
1695 // Resolve each edited range to its row in the current buffer and in the
1696 // buffer before this batch of edits.
1697 let mut row_ranges = Vec::new();
1698 let mut old_to_new_rows = BTreeMap::new();
1699 let mut language_indent_sizes_by_new_row = Vec::new();
1700 for entry in &request.entries {
1701 let position = entry.range.start;
1702 let new_row = position.to_point(&snapshot).row;
1703 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1704 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1705
1706 if !entry.first_line_is_new {
1707 let old_row = position.to_point(&request.before_edit).row;
1708 old_to_new_rows.insert(old_row, new_row);
1709 }
1710 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1711 }
1712
1713 // Build a map containing the suggested indentation for each of the edited lines
1714 // with respect to the state of the buffer before these edits. This map is keyed
1715 // by the rows for these lines in the current state of the buffer.
1716 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1717 let old_edited_ranges =
1718 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1719 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1720 let mut language_indent_size = IndentSize::default();
1721 for old_edited_range in old_edited_ranges {
1722 let suggestions = request
1723 .before_edit
1724 .suggest_autoindents(old_edited_range.clone())
1725 .into_iter()
1726 .flatten();
1727 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1728 if let Some(suggestion) = suggestion {
1729 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1730
1731 // Find the indent size based on the language for this row.
1732 while let Some((row, size)) = language_indent_sizes.peek() {
1733 if *row > new_row {
1734 break;
1735 }
1736 language_indent_size = *size;
1737 language_indent_sizes.next();
1738 }
1739
1740 let suggested_indent = old_to_new_rows
1741 .get(&suggestion.basis_row)
1742 .and_then(|from_row| {
1743 Some(old_suggestions.get(from_row).copied()?.0)
1744 })
1745 .unwrap_or_else(|| {
1746 request
1747 .before_edit
1748 .indent_size_for_line(suggestion.basis_row)
1749 })
1750 .with_delta(suggestion.delta, language_indent_size);
1751 old_suggestions
1752 .insert(new_row, (suggested_indent, suggestion.within_error));
1753 }
1754 }
1755 yield_now().await;
1756 }
1757
1758 // Compute new suggestions for each line, but only include them in the result
1759 // if they differ from the old suggestion for that line.
1760 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1761 let mut language_indent_size = IndentSize::default();
1762 for (row_range, original_indent_column) in row_ranges {
1763 let new_edited_row_range = if request.is_block_mode {
1764 row_range.start..row_range.start + 1
1765 } else {
1766 row_range.clone()
1767 };
1768
1769 let suggestions = snapshot
1770 .suggest_autoindents(new_edited_row_range.clone())
1771 .into_iter()
1772 .flatten();
1773 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1774 if let Some(suggestion) = suggestion {
1775 // Find the indent size based on the language for this row.
1776 while let Some((row, size)) = language_indent_sizes.peek() {
1777 if *row > new_row {
1778 break;
1779 }
1780 language_indent_size = *size;
1781 language_indent_sizes.next();
1782 }
1783
1784 let suggested_indent = indent_sizes
1785 .get(&suggestion.basis_row)
1786 .copied()
1787 .map(|e| e.0)
1788 .unwrap_or_else(|| {
1789 snapshot.indent_size_for_line(suggestion.basis_row)
1790 })
1791 .with_delta(suggestion.delta, language_indent_size);
1792
1793 if old_suggestions.get(&new_row).is_none_or(
1794 |(old_indentation, was_within_error)| {
1795 suggested_indent != *old_indentation
1796 && (!suggestion.within_error || *was_within_error)
1797 },
1798 ) {
1799 indent_sizes.insert(
1800 new_row,
1801 (suggested_indent, request.ignore_empty_lines),
1802 );
1803 }
1804 }
1805 }
1806
1807 if let (true, Some(original_indent_column)) =
1808 (request.is_block_mode, original_indent_column)
1809 {
1810 let new_indent =
1811 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1812 *indent
1813 } else {
1814 snapshot.indent_size_for_line(row_range.start)
1815 };
1816 let delta = new_indent.len as i64 - original_indent_column as i64;
1817 if delta != 0 {
1818 for row in row_range.skip(1) {
1819 indent_sizes.entry(row).or_insert_with(|| {
1820 let mut size = snapshot.indent_size_for_line(row);
1821 if size.kind == new_indent.kind {
1822 match delta.cmp(&0) {
1823 Ordering::Greater => size.len += delta as u32,
1824 Ordering::Less => {
1825 size.len = size.len.saturating_sub(-delta as u32)
1826 }
1827 Ordering::Equal => {}
1828 }
1829 }
1830 (size, request.ignore_empty_lines)
1831 });
1832 }
1833 }
1834 }
1835
1836 yield_now().await;
1837 }
1838 }
1839
1840 indent_sizes
1841 .into_iter()
1842 .filter_map(|(row, (indent, ignore_empty_lines))| {
1843 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1844 None
1845 } else {
1846 Some((row, indent))
1847 }
1848 })
1849 .collect()
1850 })
1851 }
1852
1853 fn apply_autoindents(
1854 &mut self,
1855 indent_sizes: BTreeMap<u32, IndentSize>,
1856 cx: &mut Context<Self>,
1857 ) {
1858 self.autoindent_requests.clear();
1859 for tx in self.wait_for_autoindent_txs.drain(..) {
1860 tx.send(()).ok();
1861 }
1862
1863 let edits: Vec<_> = indent_sizes
1864 .into_iter()
1865 .filter_map(|(row, indent_size)| {
1866 let current_size = indent_size_for_line(self, row);
1867 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1868 })
1869 .collect();
1870
1871 let preserve_preview = self.preserve_preview();
1872 self.edit(edits, None, cx);
1873 if preserve_preview {
1874 self.refresh_preview();
1875 }
1876 }
1877
1878 /// Create a minimal edit that will cause the given row to be indented
1879 /// with the given size. After applying this edit, the length of the line
1880 /// will always be at least `new_size.len`.
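///
/// A minimal worked example (illustrative only, not compiled as a doc-test);
/// the `language::IndentSize` import path is an assumption of this sketch:
///
/// ```ignore
/// use language::IndentSize;
///
/// // Grow row 3 from two spaces of indentation to four.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// // Same kind, larger size: the edit inserts "  " at `Point::new(3, 0)`.
/// // Equal sizes return `None`; a kind change replaces the whole indent.
/// assert!(edit.is_some());
/// ```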
1881 pub fn edit_for_indent_size_adjustment(
1882 row: u32,
1883 current_size: IndentSize,
1884 new_size: IndentSize,
1885 ) -> Option<(Range<Point>, String)> {
1886 if new_size.kind == current_size.kind {
1887 match new_size.len.cmp(&current_size.len) {
1888 Ordering::Greater => {
1889 let point = Point::new(row, 0);
1890 Some((
1891 point..point,
1892 iter::repeat(new_size.char())
1893 .take((new_size.len - current_size.len) as usize)
1894 .collect::<String>(),
1895 ))
1896 }
1897
1898 Ordering::Less => Some((
1899 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1900 String::new(),
1901 )),
1902
1903 Ordering::Equal => None,
1904 }
1905 } else {
1906 Some((
1907 Point::new(row, 0)..Point::new(row, current_size.len),
1908 iter::repeat(new_size.char())
1909 .take(new_size.len as usize)
1910 .collect::<String>(),
1911 ))
1912 }
1913 }
1914
1915 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1916 /// and the given new text.
1917 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1918 let old_text = self.as_rope().clone();
1919 let base_version = self.version();
1920 cx.background_executor()
1921 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1922 let old_text = old_text.to_string();
1923 let line_ending = LineEnding::detect(&new_text);
1924 LineEnding::normalize(&mut new_text);
1925 let edits = text_diff(&old_text, &new_text);
1926 Diff {
1927 base_version,
1928 line_ending,
1929 edits,
1930 }
1931 })
1932 }
1933
1934 /// Spawns a background task that searches the buffer for any whitespace
1935 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1936 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1937 let old_text = self.as_rope().clone();
1938 let line_ending = self.line_ending();
1939 let base_version = self.version();
1940 cx.background_spawn(async move {
1941 let ranges = trailing_whitespace_ranges(&old_text);
1942 let empty = Arc::<str>::from("");
1943 Diff {
1944 base_version,
1945 line_ending,
1946 edits: ranges
1947 .into_iter()
1948 .map(|range| (range, empty.clone()))
1949 .collect(),
1950 }
1951 })
1952 }
1953
1954 /// Ensures that the buffer ends with a single newline character, and
1955 /// no other whitespace. Does nothing if the buffer is empty.
1956 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1957 let len = self.len();
1958 if len == 0 {
1959 return;
1960 }
1961 let mut offset = len;
1962 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1963 let non_whitespace_len = chunk
1964 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1965 .len();
1966 offset -= chunk.len();
1967 offset += non_whitespace_len;
1968 if non_whitespace_len != 0 {
1969 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1970 return;
1971 }
1972 break;
1973 }
1974 }
1975 self.edit([(offset..len, "\n")], None, cx);
1976 }
1977
1978 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1979 /// calculated, then adjust the diff to account for those changes, and discard any
1980 /// parts of the diff that conflict with those changes.
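///
/// A minimal sketch (illustrative only), assuming `buffer: &mut Buffer` and
/// `cx: &mut Context<Buffer>` are in scope; it mirrors the spawn-and-update
/// pattern used elsewhere in this file:
///
/// ```ignore
/// let task = buffer.diff("new contents\n".to_string(), cx);
/// cx.spawn(async move |this, cx| {
///     let diff = task.await;
///     this.update(cx, |this, cx| {
///         // Concurrent edits made since the diff was computed are accounted
///         // for, and conflicting hunks are discarded.
///         this.apply_diff(diff, cx);
///     })
///     .ok();
/// })
/// .detach();
/// ```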
1981 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1982 let snapshot = self.snapshot();
1983 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1984 let mut delta = 0;
1985 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1986 while let Some(edit_since) = edits_since.peek() {
1987 // If the edit occurs after a diff hunk, then it does not
1988 // affect that hunk.
1989 if edit_since.old.start > range.end {
1990 break;
1991 }
1992 // If the edit precedes the diff hunk, then adjust the hunk
1993 // to reflect the edit.
1994 else if edit_since.old.end < range.start {
1995 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1996 edits_since.next();
1997 }
1998 // If the edit intersects a diff hunk, then discard that hunk.
1999 else {
2000 return None;
2001 }
2002 }
2003
2004 let start = (range.start as i64 + delta) as usize;
2005 let end = (range.end as i64 + delta) as usize;
2006 Some((start..end, new_text))
2007 });
2008
2009 self.start_transaction();
2010 self.text.set_line_ending(diff.line_ending);
2011 self.edit(adjusted_edits, None, cx);
2012 self.end_transaction(cx)
2013 }
2014
2015 pub fn has_unsaved_edits(&self) -> bool {
2016 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2017
2018 if last_version == self.version {
2019 self.has_unsaved_edits
2020 .set((last_version, has_unsaved_edits));
2021 return has_unsaved_edits;
2022 }
2023
2024 let has_edits = self.has_edits_since(&self.saved_version);
2025 self.has_unsaved_edits
2026 .set((self.version.clone(), has_edits));
2027 has_edits
2028 }
2029
2030 /// Checks if the buffer has unsaved changes.
2031 pub fn is_dirty(&self) -> bool {
2032 if self.capability == Capability::ReadOnly {
2033 return false;
2034 }
2035 if self.has_conflict {
2036 return true;
2037 }
2038 match self.file.as_ref().map(|f| f.disk_state()) {
2039 Some(DiskState::New) | Some(DiskState::Deleted) => {
2040 !self.is_empty() && self.has_unsaved_edits()
2041 }
2042 _ => self.has_unsaved_edits(),
2043 }
2044 }
2045
2046 /// Checks if the buffer and its file have both changed since the buffer
2047 /// was last saved or reloaded.
2048 pub fn has_conflict(&self) -> bool {
2049 if self.has_conflict {
2050 return true;
2051 }
2052 let Some(file) = self.file.as_ref() else {
2053 return false;
2054 };
2055 match file.disk_state() {
2056 DiskState::New => false,
2057 DiskState::Present { mtime } => match self.saved_mtime {
2058 Some(saved_mtime) => {
2059 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2060 }
2061 None => true,
2062 },
2063 DiskState::Deleted => false,
2064 }
2065 }
2066
2067 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2068 pub fn subscribe(&mut self) -> Subscription {
2069 self.text.subscribe()
2070 }
2071
2072 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2073 ///
2074 /// This allows downstream code to check if the buffer's text has changed without
2075 /// waiting for an effect cycle, which would be required if using events.
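///
/// A minimal sketch (illustrative only), assuming a `&mut Buffer` and a
/// `&mut Context<Buffer>` are in scope:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
///
/// buffer.edit([(0..0, "hello")], None, cx);
/// // The bit is set synchronously as part of the edit, so it can be checked
/// // immediately, without waiting for an effect cycle.
/// assert!(changed.get());
/// ```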
2076 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2077 if let Err(ix) = self
2078 .change_bits
2079 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2080 {
2081 self.change_bits.insert(ix, bit);
2082 }
2083 }
2084
2085 /// Set the change bit for all "listeners".
2086 fn was_changed(&mut self) {
2087 self.change_bits.retain(|change_bit| {
2088 change_bit
2089 .upgrade()
2090 .inspect(|bit| {
2091 _ = bit.replace(true);
2092 })
2093 .is_some()
2094 });
2095 }
2096
2097 /// Starts a transaction, if one is not already in-progress. When undoing or
2098 /// redoing edits, all of the edits performed within a transaction are undone
2099 /// or redone together.
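///
/// A minimal grouping sketch (illustrative only), assuming a `&mut Buffer` and
/// a `&mut Context<Buffer>` are in scope:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {")], None, cx);
/// buffer.edit([(11..11, "}")], None, cx);
/// let transaction_id = buffer.end_transaction(cx);
///
/// // Both edits were grouped into one transaction, so a single undo reverts
/// // them together.
/// assert_eq!(buffer.undo(cx), transaction_id);
/// ```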
2100 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2101 self.start_transaction_at(Instant::now())
2102 }
2103
2104 /// Starts a transaction, providing the current time. Subsequent transactions
2105 /// that occur within a short period of time will be grouped together. This
2106 /// is controlled by the buffer's undo grouping duration.
2107 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2108 self.transaction_depth += 1;
2109 if self.was_dirty_before_starting_transaction.is_none() {
2110 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2111 }
2112 self.text.start_transaction_at(now)
2113 }
2114
2115 /// Terminates the current transaction, if this is the outermost transaction.
2116 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2117 self.end_transaction_at(Instant::now(), cx)
2118 }
2119
2120 /// Terminates the current transaction, providing the current time. Subsequent transactions
2121 /// that occur within a short period of time will be grouped together. This
2122 /// is controlled by the buffer's undo grouping duration.
2123 pub fn end_transaction_at(
2124 &mut self,
2125 now: Instant,
2126 cx: &mut Context<Self>,
2127 ) -> Option<TransactionId> {
2128 assert!(self.transaction_depth > 0);
2129 self.transaction_depth -= 1;
2130 let was_dirty = if self.transaction_depth == 0 {
2131 self.was_dirty_before_starting_transaction.take().unwrap()
2132 } else {
2133 false
2134 };
2135 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2136 self.did_edit(&start_version, was_dirty, cx);
2137 Some(transaction_id)
2138 } else {
2139 None
2140 }
2141 }
2142
2143 /// Manually add a transaction to the buffer's undo history.
2144 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2145 self.text.push_transaction(transaction, now);
2146 }
2147
2148 /// Differs from `push_transaction` in that it does not clear the redo
2149 /// stack. Intended to be used to create a parent transaction to merge
2150 /// potential child transactions into.
2151 ///
2152 /// The caller is responsible for removing it from the undo history using
2153 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2154 /// are merged into this transaction, the caller is responsible for ensuring
2155 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2156 /// cleared is to create transactions with the usual `start_transaction` and
2157 /// `end_transaction` methods and merge the resulting transactions into
2158 /// the transaction created by this method.
2159 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2160 self.text.push_empty_transaction(now)
2161 }
2162
2163 /// Prevent the last transaction from being grouped with any subsequent transactions,
2164 /// even if they occur within the buffer's undo grouping duration.
2165 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2166 self.text.finalize_last_transaction()
2167 }
2168
2169 /// Manually group all changes since a given transaction.
2170 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2171 self.text.group_until_transaction(transaction_id);
2172 }
2173
2174 /// Manually remove a transaction from the buffer's undo history
2175 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2176 self.text.forget_transaction(transaction_id)
2177 }
2178
2179 /// Retrieve a transaction from the buffer's undo history
2180 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2181 self.text.get_transaction(transaction_id)
2182 }
2183
2184 /// Manually merge two transactions in the buffer's undo history.
2185 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2186 self.text.merge_transactions(transaction, destination);
2187 }
2188
2189 /// Waits for the buffer to receive operations with the given timestamps.
2190 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2191 &mut self,
2192 edit_ids: It,
2193 ) -> impl Future<Output = Result<()>> + use<It> {
2194 self.text.wait_for_edits(edit_ids)
2195 }
2196
2197 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2198 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2199 &mut self,
2200 anchors: It,
2201 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2202 self.text.wait_for_anchors(anchors)
2203 }
2204
2205 /// Waits for the buffer to receive operations up to the given version.
2206 pub fn wait_for_version(
2207 &mut self,
2208 version: clock::Global,
2209 ) -> impl Future<Output = Result<()>> + use<> {
2210 self.text.wait_for_version(version)
2211 }
2212
2213 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2214 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2215 pub fn give_up_waiting(&mut self) {
2216 self.text.give_up_waiting();
2217 }
2218
2219 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2220 let mut rx = None;
2221 if !self.autoindent_requests.is_empty() {
2222 let channel = oneshot::channel();
2223 self.wait_for_autoindent_txs.push(channel.0);
2224 rx = Some(channel.1);
2225 }
2226 rx
2227 }
2228
2229 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2230 pub fn set_active_selections(
2231 &mut self,
2232 selections: Arc<[Selection<Anchor>]>,
2233 line_mode: bool,
2234 cursor_shape: CursorShape,
2235 cx: &mut Context<Self>,
2236 ) {
2237 let lamport_timestamp = self.text.lamport_clock.tick();
2238 self.remote_selections.insert(
2239 self.text.replica_id(),
2240 SelectionSet {
2241 selections: selections.clone(),
2242 lamport_timestamp,
2243 line_mode,
2244 cursor_shape,
2245 },
2246 );
2247 self.send_operation(
2248 Operation::UpdateSelections {
2249 selections,
2250 line_mode,
2251 lamport_timestamp,
2252 cursor_shape,
2253 },
2254 true,
2255 cx,
2256 );
2257 self.non_text_state_update_count += 1;
2258 cx.notify();
2259 }
2260
2261 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2262 /// this replica.
2263 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2264 if self
2265 .remote_selections
2266 .get(&self.text.replica_id())
2267 .is_none_or(|set| !set.selections.is_empty())
2268 {
2269 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2270 }
2271 }
2272
2273 pub fn set_agent_selections(
2274 &mut self,
2275 selections: Arc<[Selection<Anchor>]>,
2276 line_mode: bool,
2277 cursor_shape: CursorShape,
2278 cx: &mut Context<Self>,
2279 ) {
2280 let lamport_timestamp = self.text.lamport_clock.tick();
2281 self.remote_selections.insert(
2282 ReplicaId::AGENT,
2283 SelectionSet {
2284 selections,
2285 lamport_timestamp,
2286 line_mode,
2287 cursor_shape,
2288 },
2289 );
2290 self.non_text_state_update_count += 1;
2291 cx.notify();
2292 }
2293
2294 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2295 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2296 }
2297
2298 /// Replaces the buffer's entire text.
2299 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2300 where
2301 T: Into<Arc<str>>,
2302 {
2303 self.autoindent_requests.clear();
2304 self.edit([(0..self.len(), text)], None, cx)
2305 }
2306
2307 /// Appends the given text to the end of the buffer.
2308 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2309 where
2310 T: Into<Arc<str>>,
2311 {
2312 self.edit([(self.len()..self.len(), text)], None, cx)
2313 }
2314
2315 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2316 /// delete, and a string of text to insert at that location.
2317 ///
2318 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2319 /// request for the edited ranges, which will be processed when the buffer finishes
2320 /// parsing.
2321 ///
2322 /// Parsing takes place at the end of a transaction, and may be performed
2323 /// synchronously or asynchronously, depending on the changes.
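///
/// A minimal sketch (illustrative only), assuming a `&mut Buffer` and a
/// `&mut Context<Buffer>` are in scope:
///
/// ```ignore
/// // Replace the first three bytes and insert a new line at offset 10,
/// // letting the language's indentation rules adjust each edited line.
/// buffer.edit(
///     [(0..3, "let"), (10..10, "\nvalue")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```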
2324 pub fn edit<I, S, T>(
2325 &mut self,
2326 edits_iter: I,
2327 autoindent_mode: Option<AutoindentMode>,
2328 cx: &mut Context<Self>,
2329 ) -> Option<clock::Lamport>
2330 where
2331 I: IntoIterator<Item = (Range<S>, T)>,
2332 S: ToOffset,
2333 T: Into<Arc<str>>,
2334 {
2335 // Skip invalid edits and coalesce contiguous ones.
2336 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2337
2338 for (range, new_text) in edits_iter {
2339 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2340
2341 if range.start > range.end {
2342 mem::swap(&mut range.start, &mut range.end);
2343 }
2344 let new_text = new_text.into();
2345 if !new_text.is_empty() || !range.is_empty() {
2346 if let Some((prev_range, prev_text)) = edits.last_mut()
2347 && prev_range.end >= range.start
2348 {
2349 prev_range.end = cmp::max(prev_range.end, range.end);
2350 *prev_text = format!("{prev_text}{new_text}").into();
2351 } else {
2352 edits.push((range, new_text));
2353 }
2354 }
2355 }
2356 if edits.is_empty() {
2357 return None;
2358 }
2359
2360 self.start_transaction();
2361 self.pending_autoindent.take();
2362 let autoindent_request = autoindent_mode
2363 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2364
2365 let edit_operation = self
2366 .text
2367 .edit(edits.iter().cloned(), cx.background_executor());
2368 let edit_id = edit_operation.timestamp();
2369
2370 if let Some((before_edit, mode)) = autoindent_request {
2371 let mut delta = 0isize;
2372 let mut previous_setting = None;
2373 let entries: Vec<_> = edits
2374 .into_iter()
2375 .enumerate()
2376 .zip(&edit_operation.as_edit().unwrap().new_text)
2377 .filter(|((_, (range, _)), _)| {
2378 let language = before_edit.language_at(range.start);
2379 let language_id = language.map(|l| l.id());
2380 if let Some((cached_language_id, auto_indent)) = previous_setting
2381 && cached_language_id == language_id
2382 {
2383 auto_indent
2384 } else {
2385 // The auto-indent setting is not present in editorconfigs, hence
2386 // we can avoid passing the file here.
2387 let auto_indent =
2388 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2389 previous_setting = Some((language_id, auto_indent));
2390 auto_indent
2391 }
2392 })
2393 .map(|((ix, (range, _)), new_text)| {
2394 let new_text_length = new_text.len();
2395 let old_start = range.start.to_point(&before_edit);
2396 let new_start = (delta + range.start as isize) as usize;
2397 let range_len = range.end - range.start;
2398 delta += new_text_length as isize - range_len as isize;
2399
2400 // Decide what range of the insertion to auto-indent, and whether
2401 // the first line of the insertion should be considered a newly-inserted line
2402 // or an edit to an existing line.
2403 let mut range_of_insertion_to_indent = 0..new_text_length;
2404 let mut first_line_is_new = true;
2405
2406 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2407 let old_line_end = before_edit.line_len(old_start.row);
2408
2409 if old_start.column > old_line_start {
2410 first_line_is_new = false;
2411 }
2412
2413 if !new_text.contains('\n')
2414 && (old_start.column + (range_len as u32) < old_line_end
2415 || old_line_end == old_line_start)
2416 {
2417 first_line_is_new = false;
2418 }
2419
2420 // When inserting text starting with a newline, avoid auto-indenting the
2421 // previous line.
2422 if new_text.starts_with('\n') {
2423 range_of_insertion_to_indent.start += 1;
2424 first_line_is_new = true;
2425 }
2426
2427 let mut original_indent_column = None;
2428 if let AutoindentMode::Block {
2429 original_indent_columns,
2430 } = &mode
2431 {
2432 original_indent_column = Some(if new_text.starts_with('\n') {
2433 indent_size_for_text(
2434 new_text[range_of_insertion_to_indent.clone()].chars(),
2435 )
2436 .len
2437 } else {
2438 original_indent_columns
2439 .get(ix)
2440 .copied()
2441 .flatten()
2442 .unwrap_or_else(|| {
2443 indent_size_for_text(
2444 new_text[range_of_insertion_to_indent.clone()].chars(),
2445 )
2446 .len
2447 })
2448 });
2449
2450 // Avoid auto-indenting the line after the edit.
2451 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2452 range_of_insertion_to_indent.end -= 1;
2453 }
2454 }
2455
2456 AutoindentRequestEntry {
2457 first_line_is_new,
2458 original_indent_column,
2459 indent_size: before_edit.language_indent_size_at(range.start, cx),
2460 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2461 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2462 }
2463 })
2464 .collect();
2465
2466 if !entries.is_empty() {
2467 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2468 before_edit,
2469 entries,
2470 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2471 ignore_empty_lines: false,
2472 }));
2473 }
2474 }
2475
2476 self.end_transaction(cx);
2477 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2478 Some(edit_id)
2479 }
2480
2481 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2482 self.was_changed();
2483
2484 if self.edits_since::<usize>(old_version).next().is_none() {
2485 return;
2486 }
2487
2488 self.reparse(cx);
2489 cx.emit(BufferEvent::Edited);
2490 if was_dirty != self.is_dirty() {
2491 cx.emit(BufferEvent::DirtyChanged);
2492 }
2493 cx.notify();
2494 }
2495
2496 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2497 where
2498 I: IntoIterator<Item = Range<T>>,
2499 T: ToOffset + Copy,
2500 {
2501 let before_edit = self.snapshot();
2502 let entries = ranges
2503 .into_iter()
2504 .map(|range| AutoindentRequestEntry {
2505 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2506 first_line_is_new: true,
2507 indent_size: before_edit.language_indent_size_at(range.start, cx),
2508 original_indent_column: None,
2509 })
2510 .collect();
2511 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2512 before_edit,
2513 entries,
2514 is_block_mode: false,
2515 ignore_empty_lines: true,
2516 }));
2517 self.request_autoindent(cx);
2518 }
2519
2520 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2521 // You can also request the insertion of empty lines above and below the line starting at the returned point.
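//
// A minimal sketch (illustrative only), assuming a `&mut Buffer` and a
// `&mut Context<Buffer>` are in scope:
//
//     // Insert an empty line at row 2, column 5, padded with blank lines
//     // above and below, and return the point where the empty line starts.
//     let new_line_start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);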
2522 pub fn insert_empty_line(
2523 &mut self,
2524 position: impl ToPoint,
2525 space_above: bool,
2526 space_below: bool,
2527 cx: &mut Context<Self>,
2528 ) -> Point {
2529 let mut position = position.to_point(self);
2530
2531 self.start_transaction();
2532
2533 self.edit(
2534 [(position..position, "\n")],
2535 Some(AutoindentMode::EachLine),
2536 cx,
2537 );
2538
2539 if position.column > 0 {
2540 position += Point::new(1, 0);
2541 }
2542
2543 if !self.is_line_blank(position.row) {
2544 self.edit(
2545 [(position..position, "\n")],
2546 Some(AutoindentMode::EachLine),
2547 cx,
2548 );
2549 }
2550
2551 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2552 self.edit(
2553 [(position..position, "\n")],
2554 Some(AutoindentMode::EachLine),
2555 cx,
2556 );
2557 position.row += 1;
2558 }
2559
2560 if space_below
2561 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2562 {
2563 self.edit(
2564 [(position..position, "\n")],
2565 Some(AutoindentMode::EachLine),
2566 cx,
2567 );
2568 }
2569
2570 self.end_transaction(cx);
2571
2572 position
2573 }
2574
2575 /// Applies the given remote operations to the buffer.
2576 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2577 self.pending_autoindent.take();
2578 let was_dirty = self.is_dirty();
2579 let old_version = self.version.clone();
2580 let mut deferred_ops = Vec::new();
2581 let buffer_ops = ops
2582 .into_iter()
2583 .filter_map(|op| match op {
2584 Operation::Buffer(op) => Some(op),
2585 _ => {
2586 if self.can_apply_op(&op) {
2587 self.apply_op(op, cx);
2588 } else {
2589 deferred_ops.push(op);
2590 }
2591 None
2592 }
2593 })
2594 .collect::<Vec<_>>();
2595 for operation in buffer_ops.iter() {
2596 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2597 }
2598 self.text
2599 .apply_ops(buffer_ops, Some(cx.background_executor()));
2600 self.deferred_ops.insert(deferred_ops);
2601 self.flush_deferred_ops(cx);
2602 self.did_edit(&old_version, was_dirty, cx);
2603 // Notify regardless of whether the buffer was edited, as the operations could
2604 // include a selection update.
2605 cx.notify();
2606 }
2607
2608 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2609 let mut deferred_ops = Vec::new();
2610 for op in self.deferred_ops.drain().iter().cloned() {
2611 if self.can_apply_op(&op) {
2612 self.apply_op(op, cx);
2613 } else {
2614 deferred_ops.push(op);
2615 }
2616 }
2617 self.deferred_ops.insert(deferred_ops);
2618 }
2619
2620 pub fn has_deferred_ops(&self) -> bool {
2621 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2622 }
2623
2624 fn can_apply_op(&self, operation: &Operation) -> bool {
2625 match operation {
2626 Operation::Buffer(_) => {
2627 unreachable!("buffer operations should never be applied at this layer")
2628 }
2629 Operation::UpdateDiagnostics {
2630 diagnostics: diagnostic_set,
2631 ..
2632 } => diagnostic_set.iter().all(|diagnostic| {
2633 self.text.can_resolve(&diagnostic.range.start)
2634 && self.text.can_resolve(&diagnostic.range.end)
2635 }),
2636 Operation::UpdateSelections { selections, .. } => selections
2637 .iter()
2638 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2639 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2640 }
2641 }
2642
2643 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2644 match operation {
2645 Operation::Buffer(_) => {
2646 unreachable!("buffer operations should never be applied at this layer")
2647 }
2648 Operation::UpdateDiagnostics {
2649 server_id,
2650 diagnostics: diagnostic_set,
2651 lamport_timestamp,
2652 } => {
2653 let snapshot = self.snapshot();
2654 self.apply_diagnostic_update(
2655 server_id,
2656 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2657 lamport_timestamp,
2658 cx,
2659 );
2660 }
2661 Operation::UpdateSelections {
2662 selections,
2663 lamport_timestamp,
2664 line_mode,
2665 cursor_shape,
2666 } => {
2667 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2668 && set.lamport_timestamp > lamport_timestamp
2669 {
2670 return;
2671 }
2672
2673 self.remote_selections.insert(
2674 lamport_timestamp.replica_id,
2675 SelectionSet {
2676 selections,
2677 lamport_timestamp,
2678 line_mode,
2679 cursor_shape,
2680 },
2681 );
2682 self.text.lamport_clock.observe(lamport_timestamp);
2683 self.non_text_state_update_count += 1;
2684 }
2685 Operation::UpdateCompletionTriggers {
2686 triggers,
2687 lamport_timestamp,
2688 server_id,
2689 } => {
2690 if triggers.is_empty() {
2691 self.completion_triggers_per_language_server
2692 .remove(&server_id);
2693 self.completion_triggers = self
2694 .completion_triggers_per_language_server
2695 .values()
2696 .flat_map(|triggers| triggers.iter().cloned())
2697 .collect();
2698 } else {
2699 self.completion_triggers_per_language_server
2700 .insert(server_id, triggers.iter().cloned().collect());
2701 self.completion_triggers.extend(triggers);
2702 }
2703 self.text.lamport_clock.observe(lamport_timestamp);
2704 }
2705 Operation::UpdateLineEnding {
2706 line_ending,
2707 lamport_timestamp,
2708 } => {
2709 self.text.set_line_ending(line_ending);
2710 self.text.lamport_clock.observe(lamport_timestamp);
2711 }
2712 }
2713 }
2714
2715 fn apply_diagnostic_update(
2716 &mut self,
2717 server_id: LanguageServerId,
2718 diagnostics: DiagnosticSet,
2719 lamport_timestamp: clock::Lamport,
2720 cx: &mut Context<Self>,
2721 ) {
2722 if lamport_timestamp > self.diagnostics_timestamp {
2723 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2724 if diagnostics.is_empty() {
2725 if let Ok(ix) = ix {
2726 self.diagnostics.remove(ix);
2727 }
2728 } else {
2729 match ix {
2730 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2731 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2732 };
2733 }
2734 self.diagnostics_timestamp = lamport_timestamp;
2735 self.non_text_state_update_count += 1;
2736 self.text.lamport_clock.observe(lamport_timestamp);
2737 cx.notify();
2738 cx.emit(BufferEvent::DiagnosticsUpdated);
2739 }
2740 }
2741
2742 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2743 self.was_changed();
2744 cx.emit(BufferEvent::Operation {
2745 operation,
2746 is_local,
2747 });
2748 }
2749
2750 /// Removes the selections for a given peer.
2751 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2752 self.remote_selections.remove(&replica_id);
2753 cx.notify();
2754 }
2755
2756 /// Undoes the most recent transaction.
2757 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2758 let was_dirty = self.is_dirty();
2759 let old_version = self.version.clone();
2760
2761 if let Some((transaction_id, operation)) = self.text.undo() {
2762 self.send_operation(Operation::Buffer(operation), true, cx);
2763 self.did_edit(&old_version, was_dirty, cx);
2764 Some(transaction_id)
2765 } else {
2766 None
2767 }
2768 }
2769
2770 /// Manually undoes a specific transaction in the buffer's undo history.
2771 pub fn undo_transaction(
2772 &mut self,
2773 transaction_id: TransactionId,
2774 cx: &mut Context<Self>,
2775 ) -> bool {
2776 let was_dirty = self.is_dirty();
2777 let old_version = self.version.clone();
2778 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2779 self.send_operation(Operation::Buffer(operation), true, cx);
2780 self.did_edit(&old_version, was_dirty, cx);
2781 true
2782 } else {
2783 false
2784 }
2785 }
2786
2787 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2788 pub fn undo_to_transaction(
2789 &mut self,
2790 transaction_id: TransactionId,
2791 cx: &mut Context<Self>,
2792 ) -> bool {
2793 let was_dirty = self.is_dirty();
2794 let old_version = self.version.clone();
2795
2796 let operations = self.text.undo_to_transaction(transaction_id);
2797 let undone = !operations.is_empty();
2798 for operation in operations {
2799 self.send_operation(Operation::Buffer(operation), true, cx);
2800 }
2801 if undone {
2802 self.did_edit(&old_version, was_dirty, cx)
2803 }
2804 undone
2805 }
2806
2807 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2808 let was_dirty = self.is_dirty();
2809 let operation = self.text.undo_operations(counts);
2810 let old_version = self.version.clone();
2811 self.send_operation(Operation::Buffer(operation), true, cx);
2812 self.did_edit(&old_version, was_dirty, cx);
2813 }
2814
2815 /// Redoes the most recently undone transaction.
2816 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2817 let was_dirty = self.is_dirty();
2818 let old_version = self.version.clone();
2819
2820 if let Some((transaction_id, operation)) = self.text.redo() {
2821 self.send_operation(Operation::Buffer(operation), true, cx);
2822 self.did_edit(&old_version, was_dirty, cx);
2823 Some(transaction_id)
2824 } else {
2825 None
2826 }
2827 }
2828
2829 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2830 pub fn redo_to_transaction(
2831 &mut self,
2832 transaction_id: TransactionId,
2833 cx: &mut Context<Self>,
2834 ) -> bool {
2835 let was_dirty = self.is_dirty();
2836 let old_version = self.version.clone();
2837
2838 let operations = self.text.redo_to_transaction(transaction_id);
2839 let redone = !operations.is_empty();
2840 for operation in operations {
2841 self.send_operation(Operation::Buffer(operation), true, cx);
2842 }
2843 if redone {
2844 self.did_edit(&old_version, was_dirty, cx)
2845 }
2846 redone
2847 }
2848
2849 /// Override current completion triggers with the user-provided completion triggers.
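///
/// A minimal sketch (illustrative only), assuming a `&mut Buffer`, a
/// `LanguageServerId` named `server_id`, and a `&mut Context<Buffer>` are in
/// scope:
///
/// ```ignore
/// use std::collections::BTreeSet;
///
/// let triggers = BTreeSet::from([".".to_string(), "::".to_string()]);
/// buffer.set_completion_triggers(server_id, triggers, cx);
/// assert!(buffer.completion_triggers().contains("."));
/// ```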
2850 pub fn set_completion_triggers(
2851 &mut self,
2852 server_id: LanguageServerId,
2853 triggers: BTreeSet<String>,
2854 cx: &mut Context<Self>,
2855 ) {
2856 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2857 if triggers.is_empty() {
2858 self.completion_triggers_per_language_server
2859 .remove(&server_id);
2860 self.completion_triggers = self
2861 .completion_triggers_per_language_server
2862 .values()
2863 .flat_map(|triggers| triggers.iter().cloned())
2864 .collect();
2865 } else {
2866 self.completion_triggers_per_language_server
2867 .insert(server_id, triggers.clone());
2868 self.completion_triggers.extend(triggers.iter().cloned());
2869 }
2870 self.send_operation(
2871 Operation::UpdateCompletionTriggers {
2872 triggers: triggers.into_iter().collect(),
2873 lamport_timestamp: self.completion_triggers_timestamp,
2874 server_id,
2875 },
2876 true,
2877 cx,
2878 );
2879 cx.notify();
2880 }
2881
2882 /// Returns a list of strings which trigger a completion menu for this language.
2883 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2884 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2885 &self.completion_triggers
2886 }
2887
2888 /// Call this directly after performing edits to prevent the preview tab
2889 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2890 /// to return false until there are additional edits.
2891 pub fn refresh_preview(&mut self) {
2892 self.preview_version = self.version.clone();
2893 }
2894
2895 /// Whether we should preserve the preview status of a tab containing this buffer.
2896 pub fn preserve_preview(&self) -> bool {
2897 !self.has_edits_since(&self.preview_version)
2898 }
2899}
2900
2901#[doc(hidden)]
2902#[cfg(any(test, feature = "test-support"))]
2903impl Buffer {
2904 pub fn edit_via_marked_text(
2905 &mut self,
2906 marked_string: &str,
2907 autoindent_mode: Option<AutoindentMode>,
2908 cx: &mut Context<Self>,
2909 ) {
2910 let edits = self.edits_for_marked_text(marked_string);
2911 self.edit(edits, autoindent_mode, cx);
2912 }
2913
2914 pub fn set_group_interval(&mut self, group_interval: Duration) {
2915 self.text.set_group_interval(group_interval);
2916 }
2917
2918 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2919 where
2920 T: rand::Rng,
2921 {
2922 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2923 let mut last_end = None;
2924 for _ in 0..old_range_count {
2925 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2926 break;
2927 }
2928
2929 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2930 let mut range = self.random_byte_range(new_start, rng);
2931 if rng.random_bool(0.2) {
2932 mem::swap(&mut range.start, &mut range.end);
2933 }
2934 last_end = Some(range.end);
2935
2936 let new_text_len = rng.random_range(0..10);
2937 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2938 new_text = new_text.to_uppercase();
2939
2940 edits.push((range, new_text));
2941 }
2942 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2943 self.edit(edits, None, cx);
2944 }
2945
2946 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2947 let was_dirty = self.is_dirty();
2948 let old_version = self.version.clone();
2949
2950 let ops = self.text.randomly_undo_redo(rng);
2951 if !ops.is_empty() {
2952 for op in ops {
2953 self.send_operation(Operation::Buffer(op), true, cx);
2954 self.did_edit(&old_version, was_dirty, cx);
2955 }
2956 }
2957 }
2958}
2959
2960impl EventEmitter<BufferEvent> for Buffer {}
2961
2962impl Deref for Buffer {
2963 type Target = TextBuffer;
2964
2965 fn deref(&self) -> &Self::Target {
2966 &self.text
2967 }
2968}
2969
2970impl BufferSnapshot {
2971 /// Returns [`IndentSize`] for a given line that respects user settings and
2972 /// language preferences.
2973 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2974 indent_size_for_line(self, row)
2975 }
2976
2977 /// Returns [`IndentSize`] for a given position that respects user settings
2978 /// and language preferences.
2979 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2980 let settings = language_settings(
2981 self.language_at(position).map(|l| l.name()),
2982 self.file(),
2983 cx,
2984 );
2985 if settings.hard_tabs {
2986 IndentSize::tab()
2987 } else {
2988 IndentSize::spaces(settings.tab_size.get())
2989 }
2990 }
2991
2992 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2993 /// is passed in as `single_indent_size`.
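///
/// A minimal sketch (illustrative only), assuming a `BufferSnapshot` named
/// `snapshot` is in scope:
///
/// ```ignore
/// // Suggest indentation for the first five rows, treating four spaces as a
/// // single indentation step.
/// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```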
2994 pub fn suggested_indents(
2995 &self,
2996 rows: impl Iterator<Item = u32>,
2997 single_indent_size: IndentSize,
2998 ) -> BTreeMap<u32, IndentSize> {
2999 let mut result = BTreeMap::new();
3000
3001 for row_range in contiguous_ranges(rows, 10) {
3002 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3003 Some(suggestions) => suggestions,
3004 _ => break,
3005 };
3006
3007 for (row, suggestion) in row_range.zip(suggestions) {
3008 let indent_size = if let Some(suggestion) = suggestion {
3009 result
3010 .get(&suggestion.basis_row)
3011 .copied()
3012 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3013 .with_delta(suggestion.delta, single_indent_size)
3014 } else {
3015 self.indent_size_for_line(row)
3016 };
3017
3018 result.insert(row, indent_size);
3019 }
3020 }
3021
3022 result
3023 }
3024
3025 fn suggest_autoindents(
3026 &self,
3027 row_range: Range<u32>,
3028 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3029 let config = &self.language.as_ref()?.config;
3030 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3031
3032 #[derive(Debug, Clone)]
3033 struct StartPosition {
3034 start: Point,
3035 suffix: SharedString,
3036 }
3037
3038 // Find the suggested indentation ranges based on the syntax tree.
3039 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3040 let end = Point::new(row_range.end, 0);
3041 let range = (start..end).to_offset(&self.text);
3042 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3043 Some(&grammar.indents_config.as_ref()?.query)
3044 });
3045 let indent_configs = matches
3046 .grammars()
3047 .iter()
3048 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3049 .collect::<Vec<_>>();
3050
3051 let mut indent_ranges = Vec::<Range<Point>>::new();
3052 let mut start_positions = Vec::<StartPosition>::new();
3053 let mut outdent_positions = Vec::<Point>::new();
3054 while let Some(mat) = matches.peek() {
3055 let mut start: Option<Point> = None;
3056 let mut end: Option<Point> = None;
3057
3058 let config = indent_configs[mat.grammar_index];
3059 for capture in mat.captures {
3060 if capture.index == config.indent_capture_ix {
3061 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3062 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3063 } else if Some(capture.index) == config.start_capture_ix {
3064 start = Some(Point::from_ts_point(capture.node.end_position()));
3065 } else if Some(capture.index) == config.end_capture_ix {
3066 end = Some(Point::from_ts_point(capture.node.start_position()));
3067 } else if Some(capture.index) == config.outdent_capture_ix {
3068 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3069 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3070 start_positions.push(StartPosition {
3071 start: Point::from_ts_point(capture.node.start_position()),
3072 suffix: suffix.clone(),
3073 });
3074 }
3075 }
3076
3077 matches.advance();
3078 if let Some((start, end)) = start.zip(end) {
3079 if start.row == end.row {
3080 continue;
3081 }
3082 let range = start..end;
3083 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3084 Err(ix) => indent_ranges.insert(ix, range),
3085 Ok(ix) => {
3086 let prev_range = &mut indent_ranges[ix];
3087 prev_range.end = prev_range.end.max(range.end);
3088 }
3089 }
3090 }
3091 }
3092
3093 let mut error_ranges = Vec::<Range<Point>>::new();
3094 let mut matches = self
3095 .syntax
3096 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3097 while let Some(mat) = matches.peek() {
3098 let node = mat.captures[0].node;
3099 let start = Point::from_ts_point(node.start_position());
3100 let end = Point::from_ts_point(node.end_position());
3101 let range = start..end;
3102 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3103 Ok(ix) | Err(ix) => ix,
3104 };
3105 let mut end_ix = ix;
3106 while let Some(existing_range) = error_ranges.get(end_ix) {
3107 if existing_range.end < end {
3108 end_ix += 1;
3109 } else {
3110 break;
3111 }
3112 }
3113 error_ranges.splice(ix..end_ix, [range]);
3114 matches.advance();
3115 }
3116
3117 outdent_positions.sort();
3118 for outdent_position in outdent_positions {
3119 // find the innermost indent range containing this outdent_position
3120 // set its end to the outdent position
3121 if let Some(range_to_truncate) = indent_ranges
3122 .iter_mut()
3123 .filter(|indent_range| indent_range.contains(&outdent_position))
3124 .next_back()
3125 {
3126 range_to_truncate.end = outdent_position;
3127 }
3128 }
3129
3130 start_positions.sort_by_key(|b| b.start);
3131
3132 // Find the suggested indentation increases and decreases based on regexes.
3133 let mut regex_outdent_map = HashMap::default();
3134 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3135 let mut start_positions_iter = start_positions.iter().peekable();
3136
3137 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3138 self.for_each_line(
3139 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3140 ..Point::new(row_range.end, 0),
3141 |row, line| {
3142 if config
3143 .decrease_indent_pattern
3144 .as_ref()
3145 .is_some_and(|regex| regex.is_match(line))
3146 {
3147 indent_change_rows.push((row, Ordering::Less));
3148 }
3149 if config
3150 .increase_indent_pattern
3151 .as_ref()
3152 .is_some_and(|regex| regex.is_match(line))
3153 {
3154 indent_change_rows.push((row + 1, Ordering::Greater));
3155 }
3156 while let Some(pos) = start_positions_iter.peek() {
3157 if pos.start.row < row {
3158 let pos = start_positions_iter.next().unwrap();
3159 last_seen_suffix
3160 .entry(pos.suffix.to_string())
3161 .or_default()
3162 .push(pos.start);
3163 } else {
3164 break;
3165 }
3166 }
3167 for rule in &config.decrease_indent_patterns {
3168 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3169 let row_start_column = self.indent_size_for_line(row).len;
3170 let basis_row = rule
3171 .valid_after
3172 .iter()
3173 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3174 .flatten()
3175 .filter(|start_point| start_point.column <= row_start_column)
3176 .max_by_key(|start_point| start_point.row);
3177 if let Some(outdent_to_row) = basis_row {
3178 regex_outdent_map.insert(row, outdent_to_row.row);
3179 }
3180 break;
3181 }
3182 }
3183 },
3184 );
3185
3186 let mut indent_changes = indent_change_rows.into_iter().peekable();
3187 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3188 prev_non_blank_row.unwrap_or(0)
3189 } else {
3190 row_range.start.saturating_sub(1)
3191 };
3192
3193 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3194 Some(row_range.map(move |row| {
3195 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3196
3197 let mut indent_from_prev_row = false;
3198 let mut outdent_from_prev_row = false;
3199 let mut outdent_to_row = u32::MAX;
3200 let mut from_regex = false;
3201
3202 while let Some((indent_row, delta)) = indent_changes.peek() {
3203 match indent_row.cmp(&row) {
3204 Ordering::Equal => match delta {
3205 Ordering::Less => {
3206 from_regex = true;
3207 outdent_from_prev_row = true
3208 }
3209 Ordering::Greater => {
3210 indent_from_prev_row = true;
3211 from_regex = true
3212 }
3213 _ => {}
3214 },
3215
3216 Ordering::Greater => break,
3217 Ordering::Less => {}
3218 }
3219
3220 indent_changes.next();
3221 }
3222
3223 for range in &indent_ranges {
3224 if range.start.row >= row {
3225 break;
3226 }
3227 if range.start.row == prev_row && range.end > row_start {
3228 indent_from_prev_row = true;
3229 }
3230 if range.end > prev_row_start && range.end <= row_start {
3231 outdent_to_row = outdent_to_row.min(range.start.row);
3232 }
3233 }
3234
3235 if let Some(basis_row) = regex_outdent_map.get(&row) {
3236 indent_from_prev_row = false;
3237 outdent_to_row = *basis_row;
3238 from_regex = true;
3239 }
3240
3241 let within_error = error_ranges
3242 .iter()
3243 .any(|e| e.start.row < row && e.end > row_start);
3244
3245 let suggestion = if outdent_to_row == prev_row
3246 || (outdent_from_prev_row && indent_from_prev_row)
3247 {
3248 Some(IndentSuggestion {
3249 basis_row: prev_row,
3250 delta: Ordering::Equal,
3251 within_error: within_error && !from_regex,
3252 })
3253 } else if indent_from_prev_row {
3254 Some(IndentSuggestion {
3255 basis_row: prev_row,
3256 delta: Ordering::Greater,
3257 within_error: within_error && !from_regex,
3258 })
3259 } else if outdent_to_row < prev_row {
3260 Some(IndentSuggestion {
3261 basis_row: outdent_to_row,
3262 delta: Ordering::Equal,
3263 within_error: within_error && !from_regex,
3264 })
3265 } else if outdent_from_prev_row {
3266 Some(IndentSuggestion {
3267 basis_row: prev_row,
3268 delta: Ordering::Less,
3269 within_error: within_error && !from_regex,
3270 })
3271 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3272 {
3273 Some(IndentSuggestion {
3274 basis_row: prev_row,
3275 delta: Ordering::Equal,
3276 within_error: within_error && !from_regex,
3277 })
3278 } else {
3279 None
3280 };
3281
3282 prev_row = row;
3283 prev_row_start = row_start;
3284 suggestion
3285 }))
3286 }
3287
3288 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3289 while row > 0 {
3290 row -= 1;
3291 if !self.is_line_blank(row) {
3292 return Some(row);
3293 }
3294 }
3295 None
3296 }
3297
3298 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3299 let captures = self.syntax.captures(range, &self.text, |grammar| {
3300 grammar
3301 .highlights_config
3302 .as_ref()
3303 .map(|config| &config.query)
3304 });
3305 let highlight_maps = captures
3306 .grammars()
3307 .iter()
3308 .map(|grammar| grammar.highlight_map())
3309 .collect();
3310 (captures, highlight_maps)
3311 }
3312
3313 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3314 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3315 /// returned in chunks where each chunk has a single syntax highlighting style and
3316 /// diagnostic status.
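///
/// A minimal sketch (illustrative only), assuming a `BufferSnapshot` named
/// `snapshot` is in scope and that each chunk exposes its `text` slice:
///
/// ```ignore
/// let mut contents = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a single highlight style and diagnostic status.
///     contents.push_str(chunk.text);
/// }
/// ```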
3317 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3318 let range = range.start.to_offset(self)..range.end.to_offset(self);
3319
3320 let mut syntax = None;
3321 if language_aware {
3322 syntax = Some(self.get_highlights(range.clone()));
3323 }
3324 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3325 let diagnostics = language_aware;
3326 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3327 }
3328
3329 pub fn highlighted_text_for_range<T: ToOffset>(
3330 &self,
3331 range: Range<T>,
3332 override_style: Option<HighlightStyle>,
3333 syntax_theme: &SyntaxTheme,
3334 ) -> HighlightedText {
3335 HighlightedText::from_buffer_range(
3336 range,
3337 &self.text,
3338 &self.syntax,
3339 override_style,
3340 syntax_theme,
3341 )
3342 }
3343
3344 /// Invokes the given callback for each line of text in the given range of the buffer.
3345 /// Uses a callback to avoid allocating a new string for each line.
3346 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3347 let mut line = String::new();
3348 let mut row = range.start.row;
3349 for chunk in self
3350 .as_rope()
3351 .chunks_in_range(range.to_offset(self))
3352 .chain(["\n"])
3353 {
3354 for (newline_ix, text) in chunk.split('\n').enumerate() {
3355 if newline_ix > 0 {
3356 callback(row, &line);
3357 row += 1;
3358 line.clear();
3359 }
3360 line.push_str(text);
3361 }
3362 }
3363 }
3364
3365 /// Iterates over every [`SyntaxLayer`] in the buffer.
3366 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3367 self.syntax_layers_for_range(0..self.len(), true)
3368 }
3369
3370 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3371 let offset = position.to_offset(self);
3372 self.syntax_layers_for_range(offset..offset, false)
3373 .filter(|l| l.node().end_byte() > offset)
3374 .last()
3375 }
3376
3377 pub fn syntax_layers_for_range<D: ToOffset>(
3378 &self,
3379 range: Range<D>,
3380 include_hidden: bool,
3381 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3382 self.syntax
3383 .layers_for_range(range, &self.text, include_hidden)
3384 }
3385
3386 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3387 &self,
3388 range: Range<D>,
3389 ) -> Option<SyntaxLayer<'_>> {
3390 let range = range.to_offset(self);
3391 self.syntax
3392 .layers_for_range(range, &self.text, false)
3393 .max_by(|a, b| {
3394 if a.depth != b.depth {
3395 a.depth.cmp(&b.depth)
3396 } else if a.offset.0 != b.offset.0 {
3397 a.offset.0.cmp(&b.offset.0)
3398 } else {
3399 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3400 }
3401 })
3402 }
3403
3404 /// Returns the main [`Language`].
3405 pub fn language(&self) -> Option<&Arc<Language>> {
3406 self.language.as_ref()
3407 }
3408
3409 /// Returns the [`Language`] at the given location.
3410 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3411 self.syntax_layer_at(position)
3412 .map(|info| info.language)
3413 .or(self.language.as_ref())
3414 }
3415
3416 /// Returns the settings for the language at the given location.
3417 pub fn settings_at<'a, D: ToOffset>(
3418 &'a self,
3419 position: D,
3420 cx: &'a App,
3421 ) -> Cow<'a, LanguageSettings> {
3422 language_settings(
3423 self.language_at(position).map(|l| l.name()),
3424 self.file.as_ref(),
3425 cx,
3426 )
3427 }
3428
3429 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3430 CharClassifier::new(self.language_scope_at(point))
3431 }
3432
3433 /// Returns the [`LanguageScope`] at the given location.
3434 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3435 let offset = position.to_offset(self);
3436 let mut scope = None;
3437 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3438
3439 // Use the layer that has the smallest node intersecting the given point.
3440 for layer in self
3441 .syntax
3442 .layers_for_range(offset..offset, &self.text, false)
3443 {
3444 let mut cursor = layer.node().walk();
3445
3446 let mut range = None;
3447 loop {
3448 let child_range = cursor.node().byte_range();
3449 if !child_range.contains(&offset) {
3450 break;
3451 }
3452
3453 range = Some(child_range);
3454 if cursor.goto_first_child_for_byte(offset).is_none() {
3455 break;
3456 }
3457 }
3458
3459 if let Some(range) = range
3460 && smallest_range_and_depth.as_ref().is_none_or(
3461 |(smallest_range, smallest_range_depth)| {
3462 if layer.depth > *smallest_range_depth {
3463 true
3464 } else if layer.depth == *smallest_range_depth {
3465 range.len() < smallest_range.len()
3466 } else {
3467 false
3468 }
3469 },
3470 )
3471 {
3472 smallest_range_and_depth = Some((range, layer.depth));
3473 scope = Some(LanguageScope {
3474 language: layer.language.clone(),
3475 override_id: layer.override_id(offset, &self.text),
3476 });
3477 }
3478 }
3479
3480 scope.or_else(|| {
3481 self.language.clone().map(|language| LanguageScope {
3482 language,
3483 override_id: None,
3484 })
3485 })
3486 }
3487
3488 /// Returns a tuple of the range and character kind of the word
3489 /// surrounding the given position.
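///
/// A minimal sketch (illustrative only), assuming a `BufferSnapshot` named
/// `snapshot` is in scope over the text `"hello world"`; the `CharKind::Word`
/// variant name is an assumption of this sketch:
///
/// ```ignore
/// // An offset inside the first word expands to the whole word.
/// let (range, kind) = snapshot.surrounding_word(2, None);
/// assert_eq!(range, 0..5);
/// assert_eq!(kind, Some(CharKind::Word));
/// ```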
3490 pub fn surrounding_word<T: ToOffset>(
3491 &self,
3492 start: T,
3493 scope_context: Option<CharScopeContext>,
3494 ) -> (Range<usize>, Option<CharKind>) {
3495 let mut start = start.to_offset(self);
3496 let mut end = start;
3497 let mut next_chars = self.chars_at(start).take(128).peekable();
3498 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3499
3500 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3501 let word_kind = cmp::max(
3502 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3503 next_chars.peek().copied().map(|c| classifier.kind(c)),
3504 );
3505
3506 for ch in prev_chars {
3507 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3508 start -= ch.len_utf8();
3509 } else {
3510 break;
3511 }
3512 }
3513
3514 for ch in next_chars {
3515 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3516 end += ch.len_utf8();
3517 } else {
3518 break;
3519 }
3520 }
3521
3522 (start..end, word_kind)
3523 }
3524
3525 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3526 /// range. When `require_larger` is true, the node found must be larger than the query range.
3527 ///
3528 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3529 /// be moved to the root of the tree.
3530 fn goto_node_enclosing_range(
3531 cursor: &mut tree_sitter::TreeCursor,
3532 query_range: &Range<usize>,
3533 require_larger: bool,
3534 ) -> bool {
3535 let mut ascending = false;
3536 loop {
3537 let mut range = cursor.node().byte_range();
3538 if query_range.is_empty() {
3539 // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3541 if range.start > query_range.start {
3542 cursor.goto_previous_sibling();
3543 range = cursor.node().byte_range();
3544 }
3545 } else {
3546 // When the query range is non-empty and the current node ends exactly at the start,
3547 // move to the next sibling to find a node that extends beyond the start.
3548 if range.end == query_range.start {
3549 cursor.goto_next_sibling();
3550 range = cursor.node().byte_range();
3551 }
3552 }
3553
3554 let encloses = range.contains_inclusive(query_range)
3555 && (!require_larger || range.len() > query_range.len());
3556 if !encloses {
3557 ascending = true;
3558 if !cursor.goto_parent() {
3559 return false;
3560 }
3561 continue;
3562 } else if ascending {
3563 return true;
3564 }
3565
3566 // Descend into the current node.
3567 if cursor
3568 .goto_first_child_for_byte(query_range.start)
3569 .is_none()
3570 {
3571 return true;
3572 }
3573 }
3574 }
3575
3576 pub fn syntax_ancestor<'a, T: ToOffset>(
3577 &'a self,
3578 range: Range<T>,
3579 ) -> Option<tree_sitter::Node<'a>> {
3580 let range = range.start.to_offset(self)..range.end.to_offset(self);
3581 let mut result: Option<tree_sitter::Node<'a>> = None;
3582 for layer in self
3583 .syntax
3584 .layers_for_range(range.clone(), &self.text, true)
3585 {
3586 let mut cursor = layer.node().walk();
3587
3588 // Find the node that both contains the range and is larger than it.
3589 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3590 continue;
3591 }
3592
3593 let left_node = cursor.node();
3594 let mut layer_result = left_node;
3595
3596 // For an empty range, try to find another node immediately to the right of the range.
3597 if left_node.end_byte() == range.start {
3598 let mut right_node = None;
3599 while !cursor.goto_next_sibling() {
3600 if !cursor.goto_parent() {
3601 break;
3602 }
3603 }
3604
3605 while cursor.node().start_byte() == range.start {
3606 right_node = Some(cursor.node());
3607 if !cursor.goto_first_child() {
3608 break;
3609 }
3610 }
3611
3612 // If there is a candidate node on both sides of the (empty) range, then
3613 // decide between the two by favoring a named node over an anonymous token.
3614 // If both nodes are the same in that regard, favor the right one.
3615 if let Some(right_node) = right_node
3616 && (right_node.is_named() || !left_node.is_named())
3617 {
3618 layer_result = right_node;
3619 }
3620 }
3621
3622 if let Some(previous_result) = &result
3623 && previous_result.byte_range().len() < layer_result.byte_range().len()
3624 {
3625 continue;
3626 }
3627 result = Some(layer_result);
3628 }
3629
3630 result
3631 }
3632
3633 /// Find the previous sibling syntax node at the given range.
3634 ///
3635 /// This function locates the syntax node that precedes the node containing
3636 /// the given range. It searches hierarchically by:
3637 /// 1. Finding the node that contains the given range
3638 /// 2. Looking for the previous sibling at the same tree level
3639 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3640 ///
3641 /// Returns `None` if there is no previous sibling at any ancestor level.
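    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` is assumed to be a
    /// `BufferSnapshot` and the range a byte range within it:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(10..20) {
    ///     println!("previous sibling: {}", node.kind());
    /// }
    /// ```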
3642 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3643 &'a self,
3644 range: Range<T>,
3645 ) -> Option<tree_sitter::Node<'a>> {
3646 let range = range.start.to_offset(self)..range.end.to_offset(self);
3647 let mut result: Option<tree_sitter::Node<'a>> = None;
3648
3649 for layer in self
3650 .syntax
3651 .layers_for_range(range.clone(), &self.text, true)
3652 {
3653 let mut cursor = layer.node().walk();
3654
3655 // Find the node that contains the range
3656 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3657 continue;
3658 }
3659
3660 // Look for the previous sibling, moving up ancestor levels if needed
3661 loop {
3662 if cursor.goto_previous_sibling() {
3663 let layer_result = cursor.node();
3664
3665 if let Some(previous_result) = &result {
3666 if previous_result.byte_range().end < layer_result.byte_range().end {
3667 continue;
3668 }
3669 }
3670 result = Some(layer_result);
3671 break;
3672 }
3673
3674 // No sibling found at this level, try moving up to parent
3675 if !cursor.goto_parent() {
3676 break;
3677 }
3678 }
3679 }
3680
3681 result
3682 }
3683
3684 /// Find the next sibling syntax node at the given range.
3685 ///
3686 /// This function locates the syntax node that follows the node containing
3687 /// the given range. It searches hierarchically by:
3688 /// 1. Finding the node that contains the given range
3689 /// 2. Looking for the next sibling at the same tree level
3690 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3691 ///
3692 /// Returns `None` if there is no next sibling at any ancestor level.
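    ///
    /// An illustrative sketch (not compiled as a doctest), mirroring [`Self::syntax_prev_sibling`]:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_next_sibling(10..20) {
    ///     println!("next sibling: {}", node.kind());
    /// }
    /// ```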
3693 pub fn syntax_next_sibling<'a, T: ToOffset>(
3694 &'a self,
3695 range: Range<T>,
3696 ) -> Option<tree_sitter::Node<'a>> {
3697 let range = range.start.to_offset(self)..range.end.to_offset(self);
3698 let mut result: Option<tree_sitter::Node<'a>> = None;
3699
3700 for layer in self
3701 .syntax
3702 .layers_for_range(range.clone(), &self.text, true)
3703 {
3704 let mut cursor = layer.node().walk();
3705
3706 // Find the node that contains the range
3707 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3708 continue;
3709 }
3710
3711 // Look for the next sibling, moving up ancestor levels if needed
3712 loop {
3713 if cursor.goto_next_sibling() {
3714 let layer_result = cursor.node();
3715
3716 if let Some(previous_result) = &result {
3717 if previous_result.byte_range().start > layer_result.byte_range().start {
3718 continue;
3719 }
3720 }
3721 result = Some(layer_result);
3722 break;
3723 }
3724
3725 // No sibling found at this level, try moving up to parent
3726 if !cursor.goto_parent() {
3727 break;
3728 }
3729 }
3730 }
3731
3732 result
3733 }
3734
    /// Returns the root syntax node within the given row.
3736 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3737 let start_offset = position.to_offset(self);
3738
3739 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3740
3741 let layer = self
3742 .syntax
3743 .layers_for_range(start_offset..start_offset, &self.text, true)
3744 .next()?;
3745
3746 let mut cursor = layer.node().walk();
3747
3748 // Descend to the first leaf that touches the start of the range.
3749 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3750 if cursor.node().end_byte() == start_offset {
3751 cursor.goto_next_sibling();
3752 }
3753 }
3754
3755 // Ascend to the root node within the same row.
3756 while cursor.goto_parent() {
3757 if cursor.node().start_position().row != row {
3758 break;
3759 }
3760 }
3761
3762 Some(cursor.node())
3763 }
3764
3765 /// Returns the outline for the buffer.
3766 ///
3767 /// This method allows passing an optional [`SyntaxTheme`] to
3768 /// syntax-highlight the returned symbols.
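    ///
    /// An illustrative sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` and that the returned [`Outline`] exposes its `items`:
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```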
3769 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3770 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3771 }
3772
3773 /// Returns all the symbols that contain the given position.
3774 ///
3775 /// This method allows passing an optional [`SyntaxTheme`] to
3776 /// syntax-highlight the returned symbols.
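    ///
    /// An illustrative sketch (not compiled as a doctest) that renders a breadcrumb-style
    /// path for the symbols enclosing a cursor; `snapshot` and `cursor_offset` are assumptions:
    ///
    /// ```ignore
    /// let breadcrumb = snapshot
    ///     .symbols_containing(cursor_offset, None)
    ///     .iter()
    ///     .map(|item| item.text.as_str())
    ///     .collect::<Vec<_>>()
    ///     .join(" › ");
    /// ```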
3777 pub fn symbols_containing<T: ToOffset>(
3778 &self,
3779 position: T,
3780 theme: Option<&SyntaxTheme>,
3781 ) -> Vec<OutlineItem<Anchor>> {
3782 let position = position.to_offset(self);
3783 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3784 let end = self.clip_offset(position + 1, Bias::Right);
3785 let mut items = self.outline_items_containing(start..end, false, theme);
3786 let mut prev_depth = None;
3787 items.retain(|item| {
3788 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3789 prev_depth = Some(item.depth);
3790 result
3791 });
3792 items
3793 }
3794
3795 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3796 let range = range.to_offset(self);
3797 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3798 grammar.outline_config.as_ref().map(|c| &c.query)
3799 });
3800 let configs = matches
3801 .grammars()
3802 .iter()
3803 .map(|g| g.outline_config.as_ref().unwrap())
3804 .collect::<Vec<_>>();
3805
3806 while let Some(mat) = matches.peek() {
3807 let config = &configs[mat.grammar_index];
3808 let containing_item_node = maybe!({
3809 let item_node = mat.captures.iter().find_map(|cap| {
3810 if cap.index == config.item_capture_ix {
3811 Some(cap.node)
3812 } else {
3813 None
3814 }
3815 })?;
3816
3817 let item_byte_range = item_node.byte_range();
3818 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3819 None
3820 } else {
3821 Some(item_node)
3822 }
3823 });
3824
3825 if let Some(item_node) = containing_item_node {
3826 return Some(
3827 Point::from_ts_point(item_node.start_position())
3828 ..Point::from_ts_point(item_node.end_position()),
3829 );
3830 }
3831
3832 matches.advance();
3833 }
3834 None
3835 }
3836
3837 pub fn outline_items_containing<T: ToOffset>(
3838 &self,
3839 range: Range<T>,
3840 include_extra_context: bool,
3841 theme: Option<&SyntaxTheme>,
3842 ) -> Vec<OutlineItem<Anchor>> {
3843 self.outline_items_containing_internal(
3844 range,
3845 include_extra_context,
3846 theme,
3847 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3848 )
3849 }
3850
3851 pub fn outline_items_as_points_containing<T: ToOffset>(
3852 &self,
3853 range: Range<T>,
3854 include_extra_context: bool,
3855 theme: Option<&SyntaxTheme>,
3856 ) -> Vec<OutlineItem<Point>> {
3857 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3858 range
3859 })
3860 }
3861
3862 fn outline_items_containing_internal<T: ToOffset, U>(
3863 &self,
3864 range: Range<T>,
3865 include_extra_context: bool,
3866 theme: Option<&SyntaxTheme>,
3867 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3868 ) -> Vec<OutlineItem<U>> {
3869 let range = range.to_offset(self);
3870 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3871 grammar.outline_config.as_ref().map(|c| &c.query)
3872 });
3873
3874 let mut items = Vec::new();
3875 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3876 while let Some(mat) = matches.peek() {
3877 let config = matches.grammars()[mat.grammar_index]
3878 .outline_config
3879 .as_ref()
3880 .unwrap();
3881 if let Some(item) =
3882 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3883 {
3884 items.push(item);
3885 } else if let Some(capture) = mat
3886 .captures
3887 .iter()
3888 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3889 {
3890 let capture_range = capture.node.start_position()..capture.node.end_position();
3891 let mut capture_row_range =
3892 capture_range.start.row as u32..capture_range.end.row as u32;
3893 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3894 {
3895 capture_row_range.end -= 1;
3896 }
3897 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3898 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3899 last_row_range.end = capture_row_range.end;
3900 } else {
3901 annotation_row_ranges.push(capture_row_range);
3902 }
3903 } else {
3904 annotation_row_ranges.push(capture_row_range);
3905 }
3906 }
3907 matches.advance();
3908 }
3909
3910 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3911
3912 // Assign depths based on containment relationships and convert to anchors.
3913 let mut item_ends_stack = Vec::<Point>::new();
3914 let mut anchor_items = Vec::new();
3915 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3916 for item in items {
3917 while let Some(last_end) = item_ends_stack.last().copied() {
3918 if last_end < item.range.end {
3919 item_ends_stack.pop();
3920 } else {
3921 break;
3922 }
3923 }
3924
3925 let mut annotation_row_range = None;
3926 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3927 let row_preceding_item = item.range.start.row.saturating_sub(1);
3928 if next_annotation_row_range.end < row_preceding_item {
3929 annotation_row_ranges.next();
3930 } else {
3931 if next_annotation_row_range.end == row_preceding_item {
3932 annotation_row_range = Some(next_annotation_row_range.clone());
3933 annotation_row_ranges.next();
3934 }
3935 break;
3936 }
3937 }
3938
3939 anchor_items.push(OutlineItem {
3940 depth: item_ends_stack.len(),
3941 range: range_callback(self, item.range.clone()),
3942 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3943 text: item.text,
3944 highlight_ranges: item.highlight_ranges,
3945 name_ranges: item.name_ranges,
3946 body_range: item.body_range.map(|r| range_callback(self, r)),
3947 annotation_range: annotation_row_range.map(|annotation_range| {
3948 let point_range = Point::new(annotation_range.start, 0)
3949 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3950 range_callback(self, point_range)
3951 }),
3952 });
3953 item_ends_stack.push(item.range.end);
3954 }
3955
3956 anchor_items
3957 }
3958
3959 fn next_outline_item(
3960 &self,
3961 config: &OutlineConfig,
3962 mat: &SyntaxMapMatch,
3963 range: &Range<usize>,
3964 include_extra_context: bool,
3965 theme: Option<&SyntaxTheme>,
3966 ) -> Option<OutlineItem<Point>> {
3967 let item_node = mat.captures.iter().find_map(|cap| {
3968 if cap.index == config.item_capture_ix {
3969 Some(cap.node)
3970 } else {
3971 None
3972 }
3973 })?;
3974
3975 let item_byte_range = item_node.byte_range();
3976 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3977 return None;
3978 }
3979 let item_point_range = Point::from_ts_point(item_node.start_position())
3980 ..Point::from_ts_point(item_node.end_position());
3981
3982 let mut open_point = None;
3983 let mut close_point = None;
3984
3985 let mut buffer_ranges = Vec::new();
3986 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3987 let mut range = node.start_byte()..node.end_byte();
3988 let start = node.start_position();
3989 if node.end_position().row > start.row {
3990 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3991 }
3992
3993 if !range.is_empty() {
3994 buffer_ranges.push((range, node_is_name));
3995 }
3996 };
3997
3998 for capture in mat.captures {
3999 if capture.index == config.name_capture_ix {
4000 add_to_buffer_ranges(capture.node, true);
4001 } else if Some(capture.index) == config.context_capture_ix
4002 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4003 {
4004 add_to_buffer_ranges(capture.node, false);
4005 } else {
4006 if Some(capture.index) == config.open_capture_ix {
4007 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4008 } else if Some(capture.index) == config.close_capture_ix {
4009 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4010 }
4011 }
4012 }
4013
4014 if buffer_ranges.is_empty() {
4015 return None;
4016 }
4017 let source_range_for_text =
4018 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4019
4020 let mut text = String::new();
4021 let mut highlight_ranges = Vec::new();
4022 let mut name_ranges = Vec::new();
4023 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4024 let mut last_buffer_range_end = 0;
4025 for (buffer_range, is_name) in buffer_ranges {
4026 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4027 if space_added {
4028 text.push(' ');
4029 }
4030 let before_append_len = text.len();
4031 let mut offset = buffer_range.start;
4032 chunks.seek(buffer_range.clone());
4033 for mut chunk in chunks.by_ref() {
4034 if chunk.text.len() > buffer_range.end - offset {
4035 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4036 offset = buffer_range.end;
4037 } else {
4038 offset += chunk.text.len();
4039 }
4040 let style = chunk
4041 .syntax_highlight_id
4042 .zip(theme)
4043 .and_then(|(highlight, theme)| highlight.style(theme));
4044 if let Some(style) = style {
4045 let start = text.len();
4046 let end = start + chunk.text.len();
4047 highlight_ranges.push((start..end, style));
4048 }
4049 text.push_str(chunk.text);
4050 if offset >= buffer_range.end {
4051 break;
4052 }
4053 }
4054 if is_name {
4055 let after_append_len = text.len();
4056 let start = if space_added && !name_ranges.is_empty() {
4057 before_append_len - 1
4058 } else {
4059 before_append_len
4060 };
4061 name_ranges.push(start..after_append_len);
4062 }
4063 last_buffer_range_end = buffer_range.end;
4064 }
4065
4066 Some(OutlineItem {
4067 depth: 0, // We'll calculate the depth later
4068 range: item_point_range,
4069 source_range_for_text: source_range_for_text.to_point(self),
4070 text,
4071 highlight_ranges,
4072 name_ranges,
4073 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4074 annotation_range: None,
4075 })
4076 }
4077
4078 pub fn function_body_fold_ranges<T: ToOffset>(
4079 &self,
4080 within: Range<T>,
4081 ) -> impl Iterator<Item = Range<usize>> + '_ {
4082 self.text_object_ranges(within, TreeSitterOptions::default())
4083 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4084 }
4085
4086 /// For each grammar in the language, runs the provided
4087 /// [`tree_sitter::Query`] against the given range.
4088 pub fn matches(
4089 &self,
4090 range: Range<usize>,
4091 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4092 ) -> SyntaxMapMatches<'_> {
4093 self.syntax.matches(range, self, query)
4094 }
4095
4096 pub fn all_bracket_ranges(
4097 &self,
4098 range: Range<usize>,
4099 ) -> impl Iterator<Item = BracketMatch> + '_ {
4100 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4101 grammar.brackets_config.as_ref().map(|c| &c.query)
4102 });
4103 let configs = matches
4104 .grammars()
4105 .iter()
4106 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4107 .collect::<Vec<_>>();
4108
4109 iter::from_fn(move || {
4110 while let Some(mat) = matches.peek() {
4111 let mut open = None;
4112 let mut close = None;
4113 let config = &configs[mat.grammar_index];
4114 let pattern = &config.patterns[mat.pattern_index];
4115 for capture in mat.captures {
4116 if capture.index == config.open_capture_ix {
4117 open = Some(capture.node.byte_range());
4118 } else if capture.index == config.close_capture_ix {
4119 close = Some(capture.node.byte_range());
4120 }
4121 }
4122
4123 matches.advance();
4124
4125 let Some((open_range, close_range)) = open.zip(close) else {
4126 continue;
4127 };
4128
4129 let bracket_range = open_range.start..=close_range.end;
4130 if !bracket_range.overlaps(&range) {
4131 continue;
4132 }
4133
4134 return Some(BracketMatch {
4135 open_range,
4136 close_range,
4137 newline_only: pattern.newline_only,
4138 });
4139 }
4140 None
4141 })
4142 }
4143
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4145 pub fn bracket_ranges<T: ToOffset>(
4146 &self,
4147 range: Range<T>,
4148 ) -> impl Iterator<Item = BracketMatch> + '_ {
4149 // Find bracket pairs that *inclusively* contain the given range.
4150 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4151 self.all_bracket_ranges(range)
4152 .filter(|pair| !pair.newline_only)
4153 }
4154
4155 pub fn debug_variables_query<T: ToOffset>(
4156 &self,
4157 range: Range<T>,
4158 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4159 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4160
4161 let mut matches = self.syntax.matches_with_options(
4162 range.clone(),
4163 &self.text,
4164 TreeSitterOptions::default(),
4165 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4166 );
4167
4168 let configs = matches
4169 .grammars()
4170 .iter()
4171 .map(|grammar| grammar.debug_variables_config.as_ref())
4172 .collect::<Vec<_>>();
4173
4174 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4175
4176 iter::from_fn(move || {
4177 loop {
4178 while let Some(capture) = captures.pop() {
4179 if capture.0.overlaps(&range) {
4180 return Some(capture);
4181 }
4182 }
4183
4184 let mat = matches.peek()?;
4185
4186 let Some(config) = configs[mat.grammar_index].as_ref() else {
4187 matches.advance();
4188 continue;
4189 };
4190
4191 for capture in mat.captures {
4192 let Some(ix) = config
4193 .objects_by_capture_ix
4194 .binary_search_by_key(&capture.index, |e| e.0)
4195 .ok()
4196 else {
4197 continue;
4198 };
4199 let text_object = config.objects_by_capture_ix[ix].1;
4200 let byte_range = capture.node.byte_range();
4201
4202 let mut found = false;
4203 for (range, existing) in captures.iter_mut() {
4204 if existing == &text_object {
4205 range.start = range.start.min(byte_range.start);
4206 range.end = range.end.max(byte_range.end);
4207 found = true;
4208 break;
4209 }
4210 }
4211
4212 if !found {
4213 captures.push((byte_range, text_object));
4214 }
4215 }
4216
4217 matches.advance();
4218 }
4219 })
4220 }
4221
4222 pub fn text_object_ranges<T: ToOffset>(
4223 &self,
4224 range: Range<T>,
4225 options: TreeSitterOptions,
4226 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4227 let range =
4228 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4229
4230 let mut matches =
4231 self.syntax
4232 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4233 grammar.text_object_config.as_ref().map(|c| &c.query)
4234 });
4235
4236 let configs = matches
4237 .grammars()
4238 .iter()
4239 .map(|grammar| grammar.text_object_config.as_ref())
4240 .collect::<Vec<_>>();
4241
4242 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4243
4244 iter::from_fn(move || {
4245 loop {
4246 while let Some(capture) = captures.pop() {
4247 if capture.0.overlaps(&range) {
4248 return Some(capture);
4249 }
4250 }
4251
4252 let mat = matches.peek()?;
4253
4254 let Some(config) = configs[mat.grammar_index].as_ref() else {
4255 matches.advance();
4256 continue;
4257 };
4258
4259 for capture in mat.captures {
4260 let Some(ix) = config
4261 .text_objects_by_capture_ix
4262 .binary_search_by_key(&capture.index, |e| e.0)
4263 .ok()
4264 else {
4265 continue;
4266 };
4267 let text_object = config.text_objects_by_capture_ix[ix].1;
4268 let byte_range = capture.node.byte_range();
4269
4270 let mut found = false;
4271 for (range, existing) in captures.iter_mut() {
4272 if existing == &text_object {
4273 range.start = range.start.min(byte_range.start);
4274 range.end = range.end.max(byte_range.end);
4275 found = true;
4276 break;
4277 }
4278 }
4279
4280 if !found {
4281 captures.push((byte_range, text_object));
4282 }
4283 }
4284
4285 matches.advance();
4286 }
4287 })
4288 }
4289
    /// Returns enclosing bracket ranges containing the given range.
4291 pub fn enclosing_bracket_ranges<T: ToOffset>(
4292 &self,
4293 range: Range<T>,
4294 ) -> impl Iterator<Item = BracketMatch> + '_ {
4295 let range = range.start.to_offset(self)..range.end.to_offset(self);
4296
4297 self.bracket_ranges(range.clone()).filter(move |pair| {
4298 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4299 })
4300 }
4301
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
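    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` and `selection` are
    /// assumptions:
    ///
    /// ```ignore
    /// // Ignore bracket pairs with nothing between them.
    /// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding pair.
    /// }
    /// ```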
4305 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4306 &self,
4307 range: Range<T>,
4308 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4309 ) -> Option<(Range<usize>, Range<usize>)> {
4310 let range = range.start.to_offset(self)..range.end.to_offset(self);
4311
4312 // Get the ranges of the innermost pair of brackets.
4313 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4314
4315 for pair in self.enclosing_bracket_ranges(range) {
4316 if let Some(range_filter) = range_filter
4317 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4318 {
4319 continue;
4320 }
4321
4322 let len = pair.close_range.end - pair.open_range.start;
4323
4324 if let Some((existing_open, existing_close)) = &result {
4325 let existing_len = existing_close.end - existing_open.start;
4326 if len > existing_len {
4327 continue;
4328 }
4329 }
4330
4331 result = Some((pair.open_range, pair.close_range));
4332 }
4333
4334 result
4335 }
4336
    /// Returns the offset ranges of any matches of the redaction query.
4338 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4339 /// will be run on the relevant section of the buffer.
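    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` is assumed to be a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // e.g. replace the text in `range` with bullet characters before display
    /// }
    /// ```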
4340 pub fn redacted_ranges<T: ToOffset>(
4341 &self,
4342 range: Range<T>,
4343 ) -> impl Iterator<Item = Range<usize>> + '_ {
4344 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4345 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4346 grammar
4347 .redactions_config
4348 .as_ref()
4349 .map(|config| &config.query)
4350 });
4351
4352 let configs = syntax_matches
4353 .grammars()
4354 .iter()
4355 .map(|grammar| grammar.redactions_config.as_ref())
4356 .collect::<Vec<_>>();
4357
4358 iter::from_fn(move || {
4359 let redacted_range = syntax_matches
4360 .peek()
4361 .and_then(|mat| {
4362 configs[mat.grammar_index].and_then(|config| {
4363 mat.captures
4364 .iter()
4365 .find(|capture| capture.index == config.redaction_capture_ix)
4366 })
4367 })
4368 .map(|mat| mat.node.byte_range());
4369 syntax_matches.advance();
4370 redacted_range
4371 })
4372 }
4373
4374 pub fn injections_intersecting_range<T: ToOffset>(
4375 &self,
4376 range: Range<T>,
4377 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4378 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4379
4380 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4381 grammar
4382 .injection_config
4383 .as_ref()
4384 .map(|config| &config.query)
4385 });
4386
4387 let configs = syntax_matches
4388 .grammars()
4389 .iter()
4390 .map(|grammar| grammar.injection_config.as_ref())
4391 .collect::<Vec<_>>();
4392
4393 iter::from_fn(move || {
4394 let ranges = syntax_matches.peek().and_then(|mat| {
4395 let config = &configs[mat.grammar_index]?;
4396 let content_capture_range = mat.captures.iter().find_map(|capture| {
4397 if capture.index == config.content_capture_ix {
4398 Some(capture.node.byte_range())
4399 } else {
4400 None
4401 }
4402 })?;
4403 let language = self.language_at(content_capture_range.start)?;
4404 Some((content_capture_range, language))
4405 });
4406 syntax_matches.advance();
4407 ranges
4408 })
4409 }
4410
4411 pub fn runnable_ranges(
4412 &self,
4413 offset_range: Range<usize>,
4414 ) -> impl Iterator<Item = RunnableRange> + '_ {
4415 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4416 grammar.runnable_config.as_ref().map(|config| &config.query)
4417 });
4418
4419 let test_configs = syntax_matches
4420 .grammars()
4421 .iter()
4422 .map(|grammar| grammar.runnable_config.as_ref())
4423 .collect::<Vec<_>>();
4424
4425 iter::from_fn(move || {
4426 loop {
4427 let mat = syntax_matches.peek()?;
4428
4429 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4430 let mut run_range = None;
4431 let full_range = mat.captures.iter().fold(
4432 Range {
4433 start: usize::MAX,
4434 end: 0,
4435 },
4436 |mut acc, next| {
4437 let byte_range = next.node.byte_range();
4438 if acc.start > byte_range.start {
4439 acc.start = byte_range.start;
4440 }
4441 if acc.end < byte_range.end {
4442 acc.end = byte_range.end;
4443 }
4444 acc
4445 },
4446 );
4447 if full_range.start > full_range.end {
                        // We could not compute a range spanning this match.
4449 return None;
4450 }
4451 let extra_captures: SmallVec<[_; 1]> =
4452 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4453 test_configs
4454 .extra_captures
4455 .get(capture.index as usize)
4456 .cloned()
4457 .and_then(|tag_name| match tag_name {
4458 RunnableCapture::Named(name) => {
4459 Some((capture.node.byte_range(), name))
4460 }
4461 RunnableCapture::Run => {
4462 let _ = run_range.insert(capture.node.byte_range());
4463 None
4464 }
4465 })
4466 }));
4467 let run_range = run_range?;
4468 let tags = test_configs
4469 .query
4470 .property_settings(mat.pattern_index)
4471 .iter()
4472 .filter_map(|property| {
4473 if *property.key == *"tag" {
4474 property
4475 .value
4476 .as_ref()
4477 .map(|value| RunnableTag(value.to_string().into()))
4478 } else {
4479 None
4480 }
4481 })
4482 .collect();
4483 let extra_captures = extra_captures
4484 .into_iter()
4485 .map(|(range, name)| {
4486 (
4487 name.to_string(),
4488 self.text_for_range(range).collect::<String>(),
4489 )
4490 })
4491 .collect();
4492 // All tags should have the same range.
4493 Some(RunnableRange {
4494 run_range,
4495 full_range,
4496 runnable: Runnable {
4497 tags,
4498 language: mat.language,
4499 buffer: self.remote_id(),
4500 },
4501 extra_captures,
4502 buffer_id: self.remote_id(),
4503 })
4504 });
4505
4506 syntax_matches.advance();
4507 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match's
                    // captures did not contain a run marker, we don't want to end this iterator;
                    // instead we loop around and try the next match.
4510 return test_range;
4511 }
4512 }
4513 })
4514 }
4515
    /// Returns the selections of remote peers that intersect the given range, optionally
    /// including the local replica's selections when `include_local` is true.
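    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` is assumed to be a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // render the remote peer's selection
    ///     }
    /// }
    /// ```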
4517 #[allow(clippy::type_complexity)]
4518 pub fn selections_in_range(
4519 &self,
4520 range: Range<Anchor>,
4521 include_local: bool,
4522 ) -> impl Iterator<
4523 Item = (
4524 ReplicaId,
4525 bool,
4526 CursorShape,
4527 impl Iterator<Item = &Selection<Anchor>> + '_,
4528 ),
4529 > + '_ {
4530 self.remote_selections
4531 .iter()
4532 .filter(move |(replica_id, set)| {
4533 (include_local || **replica_id != self.text.replica_id())
4534 && !set.selections.is_empty()
4535 })
4536 .map(move |(replica_id, set)| {
4537 let start_ix = match set.selections.binary_search_by(|probe| {
4538 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4539 }) {
4540 Ok(ix) | Err(ix) => ix,
4541 };
4542 let end_ix = match set.selections.binary_search_by(|probe| {
4543 probe.start.cmp(&range.end, self).then(Ordering::Less)
4544 }) {
4545 Ok(ix) | Err(ix) => ix,
4546 };
4547
4548 (
4549 *replica_id,
4550 set.line_mode,
4551 set.cursor_shape,
4552 set.selections[start_ix..end_ix].iter(),
4553 )
4554 })
4555 }
4556
    /// Returns whether the buffer contains any diagnostics.
4558 pub fn has_diagnostics(&self) -> bool {
4559 !self.diagnostics.is_empty()
4560 }
4561
4562 /// Returns all the diagnostics intersecting the given range.
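    ///
    /// An illustrative sketch (not compiled as a doctest), resolving the returned anchors to
    /// `Point` coordinates; `snapshot` is assumed to be a `BufferSnapshot`:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```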
4563 pub fn diagnostics_in_range<'a, T, O>(
4564 &'a self,
4565 search_range: Range<T>,
4566 reversed: bool,
4567 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4568 where
4569 T: 'a + Clone + ToOffset,
4570 O: 'a + FromAnchor,
4571 {
4572 let mut iterators: Vec<_> = self
4573 .diagnostics
4574 .iter()
4575 .map(|(_, collection)| {
4576 collection
4577 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4578 .peekable()
4579 })
4580 .collect();
4581
4582 std::iter::from_fn(move || {
4583 let (next_ix, _) = iterators
4584 .iter_mut()
4585 .enumerate()
4586 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4587 .min_by(|(_, a), (_, b)| {
4588 let cmp = a
4589 .range
4590 .start
4591 .cmp(&b.range.start, self)
4592 // when range is equal, sort by diagnostic severity
4593 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4594 // and stabilize order with group_id
4595 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4596 if reversed { cmp.reverse() } else { cmp }
4597 })?;
4598 iterators[next_ix]
4599 .next()
4600 .map(
4601 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4602 diagnostic,
4603 range: FromAnchor::from_anchor(&range.start, self)
4604 ..FromAnchor::from_anchor(&range.end, self),
4605 },
4606 )
4607 })
4608 }
4609
4610 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4611 /// should be used instead.
4612 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4613 &self.diagnostics
4614 }
4615
4616 /// Returns all the diagnostic groups associated with the given
4617 /// language server ID. If no language server ID is provided,
4618 /// all diagnostics groups are returned.
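    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` is assumed to be a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Groups from all language servers, ordered by their primary entry's position.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    /// }
    /// ```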
4619 pub fn diagnostic_groups(
4620 &self,
4621 language_server_id: Option<LanguageServerId>,
4622 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4623 let mut groups = Vec::new();
4624
4625 if let Some(language_server_id) = language_server_id {
4626 if let Ok(ix) = self
4627 .diagnostics
4628 .binary_search_by_key(&language_server_id, |e| e.0)
4629 {
4630 self.diagnostics[ix]
4631 .1
4632 .groups(language_server_id, &mut groups, self);
4633 }
4634 } else {
4635 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4636 diagnostics.groups(*language_server_id, &mut groups, self);
4637 }
4638 }
4639
4640 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4641 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4642 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4643 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4644 });
4645
4646 groups
4647 }
4648
4649 /// Returns an iterator over the diagnostics for the given group.
4650 pub fn diagnostic_group<O>(
4651 &self,
4652 group_id: usize,
4653 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4654 where
4655 O: FromAnchor + 'static,
4656 {
4657 self.diagnostics
4658 .iter()
4659 .flat_map(move |(_, set)| set.group(group_id, self))
4660 }
4661
4662 /// An integer version number that accounts for all updates besides
4663 /// the buffer's text itself (which is versioned via a version vector).
4664 pub fn non_text_state_update_count(&self) -> usize {
4665 self.non_text_state_update_count
4666 }
4667
4668 /// An integer version that changes when the buffer's syntax changes.
4669 pub fn syntax_update_count(&self) -> usize {
4670 self.syntax.update_count()
4671 }
4672
    /// Returns a snapshot of the underlying file.
4674 pub fn file(&self) -> Option<&Arc<dyn File>> {
4675 self.file.as_ref()
4676 }
4677
4678 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4679 if let Some(file) = self.file() {
4680 if file.path().file_name().is_none() || include_root {
4681 Some(file.full_path(cx).to_string_lossy().into_owned())
4682 } else {
4683 Some(file.path().display(file.path_style(cx)).to_string())
4684 }
4685 } else {
4686 None
4687 }
4688 }
4689
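    /// Returns the distinct words in the given range, mapped to their anchor ranges.
    ///
    /// An illustrative sketch (not compiled as a doctest); `snapshot` is assumed to be a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```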
4690 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4691 let query_str = query.fuzzy_contents;
4692 if query_str.is_some_and(|query| query.is_empty()) {
4693 return BTreeMap::default();
4694 }
4695
4696 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4697 language,
4698 override_id: None,
4699 }));
4700
4701 let mut query_ix = 0;
4702 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4703 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4704
4705 let mut words = BTreeMap::default();
4706 let mut current_word_start_ix = None;
4707 let mut chunk_ix = query.range.start;
4708 for chunk in self.chunks(query.range, false) {
4709 for (i, c) in chunk.text.char_indices() {
4710 let ix = chunk_ix + i;
4711 if classifier.is_word(c) {
4712 if current_word_start_ix.is_none() {
4713 current_word_start_ix = Some(ix);
4714 }
4715
4716 if let Some(query_chars) = &query_chars
4717 && query_ix < query_len
4718 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4719 {
4720 query_ix += 1;
4721 }
4722 continue;
4723 } else if let Some(word_start) = current_word_start_ix.take()
4724 && query_ix == query_len
4725 {
4726 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4727 let mut word_text = self.text_for_range(word_start..ix).peekable();
4728 let first_char = word_text
4729 .peek()
4730 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip "words" that start with a digit, as a heuristic to reduce useless completions.
4732 if !query.skip_digits
4733 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4734 {
4735 words.insert(word_text.collect(), word_range);
4736 }
4737 }
4738 query_ix = 0;
4739 }
4740 chunk_ix += chunk.text.len();
4741 }
4742
4743 words
4744 }
4745}
4746
4747pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of this fuzzy string, in order.
4749 pub fuzzy_contents: Option<&'a str>,
4750 /// Skips words that start with a digit.
4751 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4753 pub range: Range<usize>,
4754}
4755
4756fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4757 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4758}
4759
4760fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4761 let mut result = IndentSize::spaces(0);
4762 for c in text {
4763 let kind = match c {
4764 ' ' => IndentKind::Space,
4765 '\t' => IndentKind::Tab,
4766 _ => break,
4767 };
4768 if result.len == 0 {
4769 result.kind = kind;
4770 }
4771 result.len += 1;
4772 }
4773 result
4774}
4775
4776impl Clone for BufferSnapshot {
4777 fn clone(&self) -> Self {
4778 Self {
4779 text: self.text.clone(),
4780 syntax: self.syntax.clone(),
4781 file: self.file.clone(),
4782 remote_selections: self.remote_selections.clone(),
4783 diagnostics: self.diagnostics.clone(),
4784 language: self.language.clone(),
4785 non_text_state_update_count: self.non_text_state_update_count,
4786 }
4787 }
4788}
4789
4790impl Deref for BufferSnapshot {
4791 type Target = text::BufferSnapshot;
4792
4793 fn deref(&self) -> &Self::Target {
4794 &self.text
4795 }
4796}
4797
4798unsafe impl Send for BufferChunks<'_> {}
4799
4800impl<'a> BufferChunks<'a> {
4801 pub(crate) fn new(
4802 text: &'a Rope,
4803 range: Range<usize>,
4804 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4805 diagnostics: bool,
4806 buffer_snapshot: Option<&'a BufferSnapshot>,
4807 ) -> Self {
4808 let mut highlights = None;
4809 if let Some((captures, highlight_maps)) = syntax {
4810 highlights = Some(BufferChunkHighlights {
4811 captures,
4812 next_capture: None,
4813 stack: Default::default(),
4814 highlight_maps,
4815 })
4816 }
4817
4818 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4819 let chunks = text.chunks_in_range(range.clone());
4820
4821 let mut this = BufferChunks {
4822 range,
4823 buffer_snapshot,
4824 chunks,
4825 diagnostic_endpoints,
4826 error_depth: 0,
4827 warning_depth: 0,
4828 information_depth: 0,
4829 hint_depth: 0,
4830 unnecessary_depth: 0,
4831 underline: true,
4832 highlights,
4833 };
4834 this.initialize_diagnostic_endpoints();
4835 this
4836 }
4837
    /// Seeks to the given byte range in the buffer.
4839 pub fn seek(&mut self, range: Range<usize>) {
4840 let old_range = std::mem::replace(&mut self.range, range.clone());
4841 self.chunks.set_range(self.range.clone());
4842 if let Some(highlights) = self.highlights.as_mut() {
4843 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4844 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4845 highlights
4846 .stack
4847 .retain(|(end_offset, _)| *end_offset > range.start);
4848 if let Some(capture) = &highlights.next_capture
4849 && range.start >= capture.node.start_byte()
4850 {
4851 let next_capture_end = capture.node.end_byte();
4852 if range.start < next_capture_end {
4853 highlights.stack.push((
4854 next_capture_end,
4855 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4856 ));
4857 }
4858 highlights.next_capture.take();
4859 }
4860 } else if let Some(snapshot) = self.buffer_snapshot {
4861 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4862 *highlights = BufferChunkHighlights {
4863 captures,
4864 next_capture: None,
4865 stack: Default::default(),
4866 highlight_maps,
4867 };
4868 } else {
4869 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4870 // Seeking such BufferChunks is not supported.
4871 debug_assert!(
4872 false,
4873 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4874 );
4875 }
4876
4877 highlights.captures.set_byte_range(self.range.clone());
4878 self.initialize_diagnostic_endpoints();
4879 }
4880 }
4881
4882 fn initialize_diagnostic_endpoints(&mut self) {
4883 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4884 && let Some(buffer) = self.buffer_snapshot
4885 {
4886 let mut diagnostic_endpoints = Vec::new();
4887 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4888 diagnostic_endpoints.push(DiagnosticEndpoint {
4889 offset: entry.range.start,
4890 is_start: true,
4891 severity: entry.diagnostic.severity,
4892 is_unnecessary: entry.diagnostic.is_unnecessary,
4893 underline: entry.diagnostic.underline,
4894 });
4895 diagnostic_endpoints.push(DiagnosticEndpoint {
4896 offset: entry.range.end,
4897 is_start: false,
4898 severity: entry.diagnostic.severity,
4899 is_unnecessary: entry.diagnostic.is_unnecessary,
4900 underline: entry.diagnostic.underline,
4901 });
4902 }
4903 diagnostic_endpoints
4904 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4905 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4906 self.hint_depth = 0;
4907 self.error_depth = 0;
4908 self.warning_depth = 0;
4909 self.information_depth = 0;
4910 }
4911 }
4912
4913 /// The current byte offset in the buffer.
4914 pub fn offset(&self) -> usize {
4915 self.range.start
4916 }
4917
4918 pub fn range(&self) -> Range<usize> {
4919 self.range.clone()
4920 }
4921
4922 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4923 let depth = match endpoint.severity {
4924 DiagnosticSeverity::ERROR => &mut self.error_depth,
4925 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4926 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4927 DiagnosticSeverity::HINT => &mut self.hint_depth,
4928 _ => return,
4929 };
4930 if endpoint.is_start {
4931 *depth += 1;
4932 } else {
4933 *depth -= 1;
4934 }
4935
4936 if endpoint.is_unnecessary {
4937 if endpoint.is_start {
4938 self.unnecessary_depth += 1;
4939 } else {
4940 self.unnecessary_depth -= 1;
4941 }
4942 }
4943 }
4944
4945 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4946 if self.error_depth > 0 {
4947 Some(DiagnosticSeverity::ERROR)
4948 } else if self.warning_depth > 0 {
4949 Some(DiagnosticSeverity::WARNING)
4950 } else if self.information_depth > 0 {
4951 Some(DiagnosticSeverity::INFORMATION)
4952 } else if self.hint_depth > 0 {
4953 Some(DiagnosticSeverity::HINT)
4954 } else {
4955 None
4956 }
4957 }
4958
4959 fn current_code_is_unnecessary(&self) -> bool {
4960 self.unnecessary_depth > 0
4961 }
4962}
4963
4964impl<'a> Iterator for BufferChunks<'a> {
4965 type Item = Chunk<'a>;
4966
4967 fn next(&mut self) -> Option<Self::Item> {
4968 let mut next_capture_start = usize::MAX;
4969 let mut next_diagnostic_endpoint = usize::MAX;
4970
4971 if let Some(highlights) = self.highlights.as_mut() {
4972 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4973 if *parent_capture_end <= self.range.start {
4974 highlights.stack.pop();
4975 } else {
4976 break;
4977 }
4978 }
4979
4980 if highlights.next_capture.is_none() {
4981 highlights.next_capture = highlights.captures.next();
4982 }
4983
4984 while let Some(capture) = highlights.next_capture.as_ref() {
4985 if self.range.start < capture.node.start_byte() {
4986 next_capture_start = capture.node.start_byte();
4987 break;
4988 } else {
4989 let highlight_id =
4990 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4991 highlights
4992 .stack
4993 .push((capture.node.end_byte(), highlight_id));
4994 highlights.next_capture = highlights.captures.next();
4995 }
4996 }
4997 }
4998
4999 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5000 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5001 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5002 if endpoint.offset <= self.range.start {
5003 self.update_diagnostic_depths(endpoint);
5004 diagnostic_endpoints.next();
5005 self.underline = endpoint.underline;
5006 } else {
5007 next_diagnostic_endpoint = endpoint.offset;
5008 break;
5009 }
5010 }
5011 }
5012 self.diagnostic_endpoints = diagnostic_endpoints;
5013
5014 if let Some(ChunkBitmaps {
5015 text: chunk,
5016 chars: chars_map,
5017 tabs,
5018 }) = self.chunks.peek_with_bitmaps()
5019 {
5020 let chunk_start = self.range.start;
5021 let mut chunk_end = (self.chunks.offset() + chunk.len())
5022 .min(next_capture_start)
5023 .min(next_diagnostic_endpoint);
5024 let mut highlight_id = None;
5025 if let Some(highlights) = self.highlights.as_ref()
5026 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5027 {
5028 chunk_end = chunk_end.min(*parent_capture_end);
5029 highlight_id = Some(*parent_highlight_id);
5030 }
5031 let bit_start = chunk_start - self.chunks.offset();
5032 let bit_end = chunk_end - self.chunks.offset();
5033
5034 let slice = &chunk[bit_start..bit_end];
5035
5036 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5037 let tabs = (tabs >> bit_start) & mask;
5038 let chars = (chars_map >> bit_start) & mask;
5039
5040 self.range.start = chunk_end;
5041 if self.range.start == self.chunks.offset() + chunk.len() {
5042 self.chunks.next().unwrap();
5043 }
5044
5045 Some(Chunk {
5046 text: slice,
5047 syntax_highlight_id: highlight_id,
5048 underline: self.underline,
5049 diagnostic_severity: self.current_diagnostic_severity(),
5050 is_unnecessary: self.current_code_is_unnecessary(),
5051 tabs,
5052 chars,
5053 ..Chunk::default()
5054 })
5055 } else {
5056 None
5057 }
5058 }
5059}
5060
5061impl operation_queue::Operation for Operation {
5062 fn lamport_timestamp(&self) -> clock::Lamport {
5063 match self {
5064 Operation::Buffer(_) => {
5065 unreachable!("buffer operations should never be deferred at this layer")
5066 }
5067 Operation::UpdateDiagnostics {
5068 lamport_timestamp, ..
5069 }
5070 | Operation::UpdateSelections {
5071 lamport_timestamp, ..
5072 }
5073 | Operation::UpdateCompletionTriggers {
5074 lamport_timestamp, ..
5075 }
5076 | Operation::UpdateLineEnding {
5077 lamport_timestamp, ..
5078 } => *lamport_timestamp,
5079 }
5080 }
5081}
5082
5083impl Default for Diagnostic {
5084 fn default() -> Self {
5085 Self {
5086 source: Default::default(),
5087 source_kind: DiagnosticSourceKind::Other,
5088 code: None,
5089 code_description: None,
5090 severity: DiagnosticSeverity::ERROR,
5091 message: Default::default(),
5092 markdown: None,
5093 group_id: 0,
5094 is_primary: false,
5095 is_disk_based: false,
5096 is_unnecessary: false,
5097 underline: true,
5098 data: None,
5099 }
5100 }
5101}
5102
5103impl IndentSize {
5104 /// Returns an [`IndentSize`] representing the given spaces.
5105 pub fn spaces(len: u32) -> Self {
5106 Self {
5107 len,
5108 kind: IndentKind::Space,
5109 }
5110 }
5111
5112 /// Returns an [`IndentSize`] representing a tab.
5113 pub fn tab() -> Self {
5114 Self {
5115 len: 1,
5116 kind: IndentKind::Tab,
5117 }
5118 }
5119
5120 /// An iterator over the characters represented by this [`IndentSize`].
5121 pub fn chars(&self) -> impl Iterator<Item = char> {
5122 iter::repeat(self.char()).take(self.len as usize)
5123 }
5124
5125 /// The character representation of this [`IndentSize`].
5126 pub fn char(&self) -> char {
5127 match self.kind {
5128 IndentKind::Space => ' ',
5129 IndentKind::Tab => '\t',
5130 }
5131 }
5132
5133 /// Consumes the current [`IndentSize`] and returns a new one that has
5134 /// been shrunk or enlarged by the given size along the given direction.
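    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a four-space indent by another four spaces yields eight spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```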
5135 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5136 match direction {
5137 Ordering::Less => {
5138 if self.kind == size.kind && self.len >= size.len {
5139 self.len -= size.len;
5140 }
5141 }
5142 Ordering::Equal => {}
5143 Ordering::Greater => {
5144 if self.len == 0 {
5145 self = size;
5146 } else if self.kind == size.kind {
5147 self.len += size.len;
5148 }
5149 }
5150 }
5151 self
5152 }
5153
5154 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5155 match self.kind {
5156 IndentKind::Space => self.len as usize,
5157 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5158 }
5159 }
5160}
5161
5162#[cfg(any(test, feature = "test-support"))]
5163pub struct TestFile {
5164 pub path: Arc<RelPath>,
5165 pub root_name: String,
5166 pub local_root: Option<PathBuf>,
5167}
5168
5169#[cfg(any(test, feature = "test-support"))]
5170impl File for TestFile {
5171 fn path(&self) -> &Arc<RelPath> {
5172 &self.path
5173 }
5174
5175 fn full_path(&self, _: &gpui::App) -> PathBuf {
5176 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5177 }
5178
5179 fn as_local(&self) -> Option<&dyn LocalFile> {
5180 if self.local_root.is_some() {
5181 Some(self)
5182 } else {
5183 None
5184 }
5185 }
5186
5187 fn disk_state(&self) -> DiskState {
5188 unimplemented!()
5189 }
5190
5191 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5192 self.path().file_name().unwrap_or(self.root_name.as_ref())
5193 }
5194
5195 fn worktree_id(&self, _: &App) -> WorktreeId {
5196 WorktreeId::from_usize(0)
5197 }
5198
5199 fn to_proto(&self, _: &App) -> rpc::proto::File {
5200 unimplemented!()
5201 }
5202
5203 fn is_private(&self) -> bool {
5204 false
5205 }
5206
5207 fn path_style(&self, _cx: &App) -> PathStyle {
5208 PathStyle::local()
5209 }
5210}
5211
5212#[cfg(any(test, feature = "test-support"))]
5213impl LocalFile for TestFile {
5214 fn abs_path(&self, _cx: &App) -> PathBuf {
5215 PathBuf::from(self.local_root.as_ref().unwrap())
5216 .join(&self.root_name)
5217 .join(self.path.as_std_path())
5218 }
5219
5220 fn load(&self, _cx: &App) -> Task<Result<String>> {
5221 unimplemented!()
5222 }
5223
5224 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5225 unimplemented!()
5226 }
5227}
5228
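/// Groups an iterator of row numbers into ranges of consecutive values, capping each
/// range at `max_len` rows.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)` would yield
/// `1..4` followed by `5..7`.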
5229pub(crate) fn contiguous_ranges(
5230 values: impl Iterator<Item = u32>,
5231 max_len: usize,
5232) -> impl Iterator<Item = Range<u32>> {
5233 let mut values = values;
5234 let mut current_range: Option<Range<u32>> = None;
5235 std::iter::from_fn(move || {
5236 loop {
5237 if let Some(value) = values.next() {
5238 if let Some(range) = &mut current_range
5239 && value == range.end
5240 && range.len() < max_len
5241 {
5242 range.end += 1;
5243 continue;
5244 }
5245
5246 let prev_range = current_range.clone();
5247 current_range = Some(value..(value + 1));
5248 if prev_range.is_some() {
5249 return prev_range;
5250 }
5251 } else {
5252 return current_range.take();
5253 }
5254 }
5255 })
5256}
5257
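/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// consulting the language scope's configured word characters.
///
/// A minimal sketch (not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```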
5258#[derive(Default, Debug)]
5259pub struct CharClassifier {
5260 scope: Option<LanguageScope>,
5261 scope_context: Option<CharScopeContext>,
5262 ignore_punctuation: bool,
5263}
5264
5265impl CharClassifier {
5266 pub fn new(scope: Option<LanguageScope>) -> Self {
5267 Self {
5268 scope,
5269 scope_context: None,
5270 ignore_punctuation: false,
5271 }
5272 }
5273
5274 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5275 Self {
5276 scope_context,
5277 ..self
5278 }
5279 }
5280
5281 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5282 Self {
5283 ignore_punctuation,
5284 ..self
5285 }
5286 }
5287
5288 pub fn is_whitespace(&self, c: char) -> bool {
5289 self.kind(c) == CharKind::Whitespace
5290 }
5291
5292 pub fn is_word(&self, c: char) -> bool {
5293 self.kind(c) == CharKind::Word
5294 }
5295
5296 pub fn is_punctuation(&self, c: char) -> bool {
5297 self.kind(c) == CharKind::Punctuation
5298 }
5299
5300 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5301 if c.is_alphanumeric() || c == '_' {
5302 return CharKind::Word;
5303 }
5304
5305 if let Some(scope) = &self.scope {
5306 let characters = match self.scope_context {
5307 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5308 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5309 None => scope.word_characters(),
5310 };
5311 if let Some(characters) = characters
5312 && characters.contains(&c)
5313 {
5314 return CharKind::Word;
5315 }
5316 }
5317
5318 if c.is_whitespace() {
5319 return CharKind::Whitespace;
5320 }
5321
5322 if ignore_punctuation {
5323 CharKind::Word
5324 } else {
5325 CharKind::Punctuation
5326 }
5327 }
5328
5329 pub fn kind(&self, c: char) -> CharKind {
5330 self.kind_with(c, self.ignore_punctuation)
5331 }
5332}
5333
5334/// Find all of the ranges of whitespace that occur at the ends of lines
5335/// in the given rope.
5336///
5337/// This could also be done with a regex search, but this implementation
5338/// avoids copying text.
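///
/// An illustrative sketch (not compiled as a doctest), assuming a `Rope` can be
/// constructed from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("a \nb\t\t\n");
/// // One range for the space after `a`, one for the tabs after `b`.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..6]);
/// ```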
5339pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5340 let mut ranges = Vec::new();
5341
5342 let mut offset = 0;
5343 let mut prev_chunk_trailing_whitespace_range = 0..0;
5344 for chunk in rope.chunks() {
5345 let mut prev_line_trailing_whitespace_range = 0..0;
5346 for (i, line) in chunk.split('\n').enumerate() {
5347 let line_end_offset = offset + line.len();
5348 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5349 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5350
5351 if i == 0 && trimmed_line_len == 0 {
5352 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5353 }
5354 if !prev_line_trailing_whitespace_range.is_empty() {
5355 ranges.push(prev_line_trailing_whitespace_range);
5356 }
5357
5358 offset = line_end_offset + 1;
5359 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5360 }
5361
5362 offset -= 1;
5363 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5364 }
5365
5366 if !prev_chunk_trailing_whitespace_range.is_empty() {
5367 ranges.push(prev_chunk_trailing_whitespace_range);
5368 }
5369
5370 ranges
5371}