1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
28 SharedString, StyledText, Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] has permission to edit.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
176pub enum CursorShape {
177 /// A vertical bar
178 #[default]
179 Bar,
180 /// A block that surrounds the following character
181 Block,
182 /// An underline that runs along the following character
183 Underline,
184 /// A box drawn around the following character
185 Hollow,
186}
187
188impl From<settings::CursorShape> for CursorShape {
189 fn from(shape: settings::CursorShape) -> Self {
190 match shape {
191 settings::CursorShape::Bar => CursorShape::Bar,
192 settings::CursorShape::Block => CursorShape::Block,
193 settings::CursorShape::Underline => CursorShape::Underline,
194 settings::CursorShape::Hollow => CursorShape::Hollow,
195 }
196 }
197}
198
199#[derive(Clone, Debug)]
200struct SelectionSet {
201 line_mode: bool,
202 cursor_shape: CursorShape,
203 selections: Arc<[Selection<Anchor>]>,
204 lamport_timestamp: clock::Lamport,
205}
206
207/// A diagnostic associated with a certain range of a buffer.
208#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
209pub struct Diagnostic {
210 /// The name of the service that produced this diagnostic.
211 pub source: Option<String>,
212 /// A machine-readable code that identifies this diagnostic.
213 pub code: Option<NumberOrString>,
214 pub code_description: Option<lsp::Uri>,
215 /// Whether this diagnostic is a hint, warning, or error.
216 pub severity: DiagnosticSeverity,
217 /// The human-readable message associated with this diagnostic.
218 pub message: String,
    /// The human-readable message in Markdown format, if available.
220 pub markdown: Option<String>,
221 /// An id that identifies the group to which this diagnostic belongs.
222 ///
223 /// When a language server produces a diagnostic with
224 /// one or more associated diagnostics, those diagnostics are all
225 /// assigned a single group ID.
226 pub group_id: usize,
227 /// Whether this diagnostic is the primary diagnostic for its group.
228 ///
229 /// In a given group, the primary diagnostic is the top-level diagnostic
230 /// returned by the language server. The non-primary diagnostics are the
231 /// associated diagnostics.
232 pub is_primary: bool,
233 /// Whether this diagnostic is considered to originate from an analysis of
234 /// files on disk, as opposed to any unsaved buffer contents. This is a
235 /// property of a given diagnostic source, and is configured for a given
236 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
237 /// for the language server.
238 pub is_disk_based: bool,
239 /// Whether this diagnostic marks unnecessary code.
240 pub is_unnecessary: bool,
    /// A quick way to separate diagnostic groups by their source.
242 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. This is
    /// passed back to the server when code actions are requested for this diagnostic.
244 pub data: Option<Value>,
245 /// Whether to underline the corresponding text range in the editor.
246 pub underline: bool,
247}
248
249#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
250pub enum DiagnosticSourceKind {
251 Pulled,
252 Pushed,
253 Other,
254}
255
256/// An operation used to synchronize this buffer with its other replicas.
257#[derive(Clone, Debug, PartialEq)]
258pub enum Operation {
259 /// A text operation.
260 Buffer(text::Operation),
261
262 /// An update to the buffer's diagnostics.
263 UpdateDiagnostics {
264 /// The id of the language server that produced the new diagnostics.
265 server_id: LanguageServerId,
266 /// The diagnostics.
267 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 },
271
272 /// An update to the most recent selections in this buffer.
273 UpdateSelections {
274 /// The selections.
275 selections: Arc<[Selection<Anchor>]>,
276 /// The buffer's lamport timestamp.
277 lamport_timestamp: clock::Lamport,
278 /// Whether the selections are in 'line mode'.
279 line_mode: bool,
280 /// The [`CursorShape`] associated with these selections.
281 cursor_shape: CursorShape,
282 },
283
284 /// An update to the characters that should trigger autocompletion
285 /// for this buffer.
286 UpdateCompletionTriggers {
287 /// The characters that trigger autocompletion.
288 triggers: Vec<String>,
289 /// The buffer's lamport timestamp.
290 lamport_timestamp: clock::Lamport,
291 /// The language server ID.
292 server_id: LanguageServerId,
293 },
294
295 /// An update to the line ending type of this buffer.
296 UpdateLineEnding {
297 /// The line ending type.
298 line_ending: LineEnding,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 },
302}
303
304/// An event that occurs in a buffer.
305#[derive(Clone, Debug, PartialEq)]
306pub enum BufferEvent {
307 /// The buffer was changed in a way that must be
308 /// propagated to its other replicas.
309 Operation {
310 operation: Operation,
311 is_local: bool,
312 },
313 /// The buffer was edited.
314 Edited,
315 /// The buffer's `dirty` bit changed.
316 DirtyChanged,
317 /// The buffer was saved.
318 Saved,
319 /// The buffer's file was changed on disk.
320 FileHandleChanged,
321 /// The buffer was reloaded.
322 Reloaded,
323 /// The buffer is in need of a reload
324 ReloadNeeded,
325 /// The buffer's language was changed.
326 LanguageChanged,
327 /// The buffer's syntax trees were updated.
328 Reparsed,
329 /// The buffer's diagnostics were updated.
330 DiagnosticsUpdated,
331 /// The buffer gained or lost editing capabilities.
332 CapabilityChanged,
333}
334
335/// The file associated with a buffer.
336pub trait File: Send + Sync + Any {
337 /// Returns the [`LocalFile`] associated with this file, if the
338 /// file is local.
339 fn as_local(&self) -> Option<&dyn LocalFile>;
340
341 /// Returns whether this file is local.
342 fn is_local(&self) -> bool {
343 self.as_local().is_some()
344 }
345
346 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
347 /// only available in some states, such as modification time.
348 fn disk_state(&self) -> DiskState;
349
350 /// Returns the path of this file relative to the worktree's root directory.
351 fn path(&self) -> &Arc<RelPath>;
352
353 /// Returns the path of this file relative to the worktree's parent directory (this means it
354 /// includes the name of the worktree's root folder).
355 fn full_path(&self, cx: &App) -> PathBuf;
356
357 /// Returns the path style of this file.
358 fn path_style(&self, cx: &App) -> PathStyle;
359
360 /// Returns the last component of this handle's absolute path. If this handle refers to the root
361 /// of its worktree, then this method will return the name of the worktree itself.
362 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
363
364 /// Returns the id of the worktree to which this file belongs.
365 ///
366 /// This is needed for looking up project-specific settings.
367 fn worktree_id(&self, cx: &App) -> WorktreeId;
368
369 /// Converts this file into a protobuf message.
370 fn to_proto(&self, cx: &App) -> rpc::proto::File;
371
372 /// Return whether Zed considers this to be a private file.
373 fn is_private(&self) -> bool;
374}
375
376/// The file's storage status - whether it's stored (`Present`), and if so when it was last
377/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
378/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
379/// indicator for new files.
380#[derive(Copy, Clone, Debug, PartialEq)]
381pub enum DiskState {
382 /// File created in Zed that has not been saved.
383 New,
384 /// File present on the filesystem.
385 Present { mtime: MTime },
386 /// Deleted file that was previously present.
387 Deleted,
388}
389
390impl DiskState {
391 /// Returns the file's last known modification time on disk.
392 pub fn mtime(self) -> Option<MTime> {
393 match self {
394 DiskState::New => None,
395 DiskState::Present { mtime } => Some(mtime),
396 DiskState::Deleted => None,
397 }
398 }
399
400 pub fn exists(&self) -> bool {
401 match self {
402 DiskState::New => false,
403 DiskState::Present { .. } => true,
404 DiskState::Deleted => false,
405 }
406 }
407}
408
409/// The file associated with a buffer, in the case where the file is on the local disk.
410pub trait LocalFile: File {
411 /// Returns the absolute path of this file
412 fn abs_path(&self, cx: &App) -> PathBuf;
413
414 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
415 fn load(&self, cx: &App) -> Task<Result<String>>;
416
417 /// Loads the file's contents from disk.
418 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation column of the first line of each
433 /// insertion, if it has been copied.
434 ///
435 /// Knowing this makes it possible to preserve the relative indentation
436 /// of every line in the insertion from when it was copied.
437 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a` columns.
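        ///
        /// For example, if a block is copied from indent column 4 (`a = 4`) and
        /// its first line is auto-indented to column 8 (`b = 8`), every line of
        /// the insertion is shifted right by `b - a = 4` columns.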
441 original_indent_columns: Vec<Option<u32>>,
442 },
443}
444
445#[derive(Clone)]
446struct AutoindentRequest {
447 before_edit: BufferSnapshot,
448 entries: Vec<AutoindentRequestEntry>,
449 is_block_mode: bool,
450 ignore_empty_lines: bool,
451}
452
453#[derive(Debug, Clone)]
454struct AutoindentRequestEntry {
455 /// A range of the buffer whose indentation should be adjusted.
456 range: Range<Anchor>,
457 /// Whether or not these lines should be considered brand new, for the
458 /// purpose of auto-indent. When text is not new, its indentation will
459 /// only be adjusted if the suggested indentation level has *changed*
460 /// since the edit was made.
461 first_line_is_new: bool,
462 indent_size: IndentSize,
463 original_indent_column: Option<u32>,
464}
465
466#[derive(Debug)]
467struct IndentSuggestion {
468 basis_row: u32,
469 delta: Ordering,
470 within_error: bool,
471}
472
473struct BufferChunkHighlights<'a> {
474 captures: SyntaxMapCaptures<'a>,
475 next_capture: Option<SyntaxMapCapture<'a>>,
476 stack: Vec<(usize, HighlightId)>,
477 highlight_maps: Vec<HighlightMap>,
478}
479
480/// An iterator that yields chunks of a buffer's text, along with their
481/// syntax highlights and diagnostic status.
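///
/// A minimal sketch of consuming the iterator (obtaining it through the
/// snapshot's `chunks` method is assumed):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(severity) = chunk.diagnostic_severity {
///         // this chunk overlaps a diagnostic of the given severity
///     }
///     print!("{}", chunk.text);
/// }
/// ```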
482pub struct BufferChunks<'a> {
483 buffer_snapshot: Option<&'a BufferSnapshot>,
484 range: Range<usize>,
485 chunks: text::Chunks<'a>,
486 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
487 error_depth: usize,
488 warning_depth: usize,
489 information_depth: usize,
490 hint_depth: usize,
491 unnecessary_depth: usize,
492 underline: bool,
493 highlights: Option<BufferChunkHighlights<'a>>,
494}
495
496/// A chunk of a buffer's text, along with its syntax highlight and
497/// diagnostic status.
498#[derive(Clone, Debug, Default)]
499pub struct Chunk<'a> {
500 /// The text of the chunk.
501 pub text: &'a str,
502 /// The syntax highlighting style of the chunk.
503 pub syntax_highlight_id: Option<HighlightId>,
504 /// The highlight style that has been applied to this chunk in
505 /// the editor.
506 pub highlight_style: Option<HighlightStyle>,
507 /// The severity of diagnostic associated with this chunk, if any.
508 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitset of the character indices in this chunk.
    pub chars: u128,
513 /// Whether this chunk of text is marked as unnecessary.
514 pub is_unnecessary: bool,
515 /// Whether this chunk of text was originally a tab character.
516 pub is_tab: bool,
517 /// Whether this chunk of text was originally an inlay.
518 pub is_inlay: bool,
519 /// Whether to underline the corresponding text range in the editor.
520 pub underline: bool,
521}
522
523/// A set of edits to a given version of a buffer, computed asynchronously.
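///
/// A `Diff` is typically produced by the buffer's `diff` method and applied
/// with `apply_diff`; a minimal usage sketch (entity and async wiring assumed):
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```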
524#[derive(Debug)]
525pub struct Diff {
526 pub base_version: clock::Global,
527 pub line_ending: LineEnding,
528 pub edits: Vec<(Range<usize>, Arc<str>)>,
529}
530
531#[derive(Debug, Clone, Copy)]
532pub(crate) struct DiagnosticEndpoint {
533 offset: usize,
534 is_start: bool,
535 underline: bool,
536 severity: DiagnosticSeverity,
537 is_unnecessary: bool,
538}
539
540/// A class of characters, used for characterizing a run of text.
541#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
542pub enum CharKind {
543 /// Whitespace.
544 Whitespace,
545 /// Punctuation.
546 Punctuation,
547 /// Word.
548 Word,
549}
550
551/// Context for character classification within a specific scope.
552#[derive(Copy, Clone, Eq, PartialEq, Debug)]
553pub enum CharScopeContext {
554 /// Character classification for completion queries.
555 ///
556 /// This context treats certain characters as word constituents that would
557 /// normally be considered punctuation, such as '-' in Tailwind classes
558 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
559 Completion,
560 /// Character classification for linked edits.
561 ///
562 /// This context handles characters that should be treated as part of
563 /// identifiers during linked editing operations, such as '.' in JSX
564 /// component names like `<Animated.View>`.
565 LinkedEdit,
566}
567
568/// A runnable is a set of data about a region that could be resolved into a task
569pub struct Runnable {
570 pub tags: SmallVec<[RunnableTag; 1]>,
571 pub language: Arc<Language>,
572 pub buffer: BufferId,
573}
574
575#[derive(Default, Clone, Debug)]
576pub struct HighlightedText {
577 pub text: SharedString,
578 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
579}
580
581#[derive(Default, Debug)]
582struct HighlightedTextBuilder {
583 pub text: String,
584 highlights: Vec<(Range<usize>, HighlightStyle)>,
585}
586
587impl HighlightedText {
588 pub fn from_buffer_range<T: ToOffset>(
589 range: Range<T>,
590 snapshot: &text::BufferSnapshot,
591 syntax_snapshot: &SyntaxSnapshot,
592 override_style: Option<HighlightStyle>,
593 syntax_theme: &SyntaxTheme,
594 ) -> Self {
595 let mut highlighted_text = HighlightedTextBuilder::default();
596 highlighted_text.add_text_from_buffer_range(
597 range,
598 snapshot,
599 syntax_snapshot,
600 override_style,
601 syntax_theme,
602 );
603 highlighted_text.build()
604 }
605
606 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
607 gpui::StyledText::new(self.text.clone())
608 .with_default_highlights(default_style, self.highlights.iter().cloned())
609 }
610
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
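    ///
    /// A minimal sketch (field values chosen for illustration):
    ///
    /// ```ignore
    /// let highlighted = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more);
    /// ```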
613 pub fn first_line_preview(self) -> (Self, bool) {
614 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
615 let first_line = &self.text[..newline_ix];
616
617 // Trim leading whitespace, unless an edit starts prior to it.
618 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
619 if let Some((first_highlight_range, _)) = self.highlights.first() {
620 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
621 }
622
623 let preview_text = &first_line[preview_start_ix..];
624 let preview_highlights = self
625 .highlights
626 .into_iter()
627 .skip_while(|(range, _)| range.end <= preview_start_ix)
628 .take_while(|(range, _)| range.start < newline_ix)
629 .filter_map(|(mut range, highlight)| {
630 range.start = range.start.saturating_sub(preview_start_ix);
631 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
632 if range.is_empty() {
633 None
634 } else {
635 Some((range, highlight))
636 }
637 });
638
639 let preview = Self {
640 text: SharedString::new(preview_text),
641 highlights: preview_highlights.collect(),
642 };
643
644 (preview, self.text.len() > newline_ix)
645 }
646}
647
648impl HighlightedTextBuilder {
649 pub fn build(self) -> HighlightedText {
650 HighlightedText {
651 text: self.text.into(),
652 highlights: self.highlights,
653 }
654 }
655
656 pub fn add_text_from_buffer_range<T: ToOffset>(
657 &mut self,
658 range: Range<T>,
659 snapshot: &text::BufferSnapshot,
660 syntax_snapshot: &SyntaxSnapshot,
661 override_style: Option<HighlightStyle>,
662 syntax_theme: &SyntaxTheme,
663 ) {
664 let range = range.to_offset(snapshot);
665 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
666 let start = self.text.len();
667 self.text.push_str(chunk.text);
668 let end = self.text.len();
669
670 if let Some(highlight_style) = chunk
671 .syntax_highlight_id
672 .and_then(|id| id.style(syntax_theme))
673 {
674 let highlight_style = override_style.map_or(highlight_style, |override_style| {
675 highlight_style.highlight(override_style)
676 });
677 self.highlights.push((start..end, highlight_style));
678 } else if let Some(override_style) = override_style {
679 self.highlights.push((start..end, override_style));
680 }
681 }
682 }
683
684 fn highlighted_chunks<'a>(
685 range: Range<usize>,
686 snapshot: &'a text::BufferSnapshot,
687 syntax_snapshot: &'a SyntaxSnapshot,
688 ) -> BufferChunks<'a> {
689 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
690 grammar
691 .highlights_config
692 .as_ref()
693 .map(|config| &config.query)
694 });
695
696 let highlight_maps = captures
697 .grammars()
698 .iter()
699 .map(|grammar| grammar.highlight_map())
700 .collect();
701
702 BufferChunks::new(
703 snapshot.as_rope(),
704 range,
705 Some((captures, highlight_maps)),
706 false,
707 None,
708 )
709 }
710}
711
712#[derive(Clone)]
713pub struct EditPreview {
714 old_snapshot: text::BufferSnapshot,
715 applied_edits_snapshot: text::BufferSnapshot,
716 syntax_snapshot: SyntaxSnapshot,
717}
718
719impl EditPreview {
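    /// Highlights the result of applying `edits` to the previewed snapshot,
    /// styling insertions and, optionally, the deleted text from
    /// `current_snapshot`.
    ///
    /// A minimal usage sketch (the `edits` value and async wiring are assumed):
    ///
    /// ```ignore
    /// let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
    /// let preview = preview_task.await;
    /// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
    /// let styled = highlighted.to_styled_text(&text_style);
    /// ```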
720 pub fn highlight_edits(
721 &self,
722 current_snapshot: &BufferSnapshot,
723 edits: &[(Range<Anchor>, String)],
724 include_deletions: bool,
725 cx: &App,
726 ) -> HighlightedText {
727 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
728 return HighlightedText::default();
729 };
730
731 let mut highlighted_text = HighlightedTextBuilder::default();
732
733 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
734
735 let insertion_highlight_style = HighlightStyle {
736 background_color: Some(cx.theme().status().created_background),
737 ..Default::default()
738 };
739 let deletion_highlight_style = HighlightStyle {
740 background_color: Some(cx.theme().status().deleted_background),
741 ..Default::default()
742 };
743 let syntax_theme = cx.theme().syntax();
744
745 for (range, edit_text) in edits {
746 let edit_new_end_in_preview_snapshot = range
747 .end
748 .bias_right(&self.old_snapshot)
749 .to_offset(&self.applied_edits_snapshot);
750 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
751
752 let unchanged_range_in_preview_snapshot =
753 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
754 if !unchanged_range_in_preview_snapshot.is_empty() {
755 highlighted_text.add_text_from_buffer_range(
756 unchanged_range_in_preview_snapshot,
757 &self.applied_edits_snapshot,
758 &self.syntax_snapshot,
759 None,
760 syntax_theme,
761 );
762 }
763
764 let range_in_current_snapshot = range.to_offset(current_snapshot);
765 if include_deletions && !range_in_current_snapshot.is_empty() {
766 highlighted_text.add_text_from_buffer_range(
767 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
770 Some(deletion_highlight_style),
771 syntax_theme,
772 );
773 }
774
775 if !edit_text.is_empty() {
776 highlighted_text.add_text_from_buffer_range(
777 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
778 &self.applied_edits_snapshot,
779 &self.syntax_snapshot,
780 Some(insertion_highlight_style),
781 syntax_theme,
782 );
783 }
784
785 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
786 }
787
788 highlighted_text.add_text_from_buffer_range(
789 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
790 &self.applied_edits_snapshot,
791 &self.syntax_snapshot,
792 None,
793 syntax_theme,
794 );
795
796 highlighted_text.build()
797 }
798
799 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
800 let (first, _) = edits.first()?;
801 let (last, _) = edits.last()?;
802
803 let start = first
804 .start
805 .bias_left(&self.old_snapshot)
806 .to_point(&self.applied_edits_snapshot);
807 let end = last
808 .end
809 .bias_right(&self.old_snapshot)
810 .to_point(&self.applied_edits_snapshot);
811
812 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
813 let range = Point::new(start.row, 0)
814 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
815
816 Some(range.to_offset(&self.applied_edits_snapshot))
817 }
818}
819
820#[derive(Clone, Debug, PartialEq, Eq)]
821pub struct BracketMatch {
822 pub open_range: Range<usize>,
823 pub close_range: Range<usize>,
824 pub newline_only: bool,
825}
826
827impl Buffer {
828 /// Create a new buffer with the given base text.
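    ///
    /// A minimal usage sketch (assumes a gpui context `cx` that can create entities):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```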
829 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
830 Self::build(
831 TextBuffer::new(
832 ReplicaId::LOCAL,
833 cx.entity_id().as_non_zero_u64().into(),
834 base_text.into(),
835 &cx.background_executor(),
836 ),
837 None,
838 Capability::ReadWrite,
839 )
840 }
841
842 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
843 pub fn local_normalized(
844 base_text_normalized: Rope,
845 line_ending: LineEnding,
846 cx: &Context<Self>,
847 ) -> Self {
848 Self::build(
849 TextBuffer::new_normalized(
850 ReplicaId::LOCAL,
851 cx.entity_id().as_non_zero_u64().into(),
852 line_ending,
853 base_text_normalized,
854 ),
855 None,
856 Capability::ReadWrite,
857 )
858 }
859
860 /// Create a new buffer that is a replica of a remote buffer.
861 pub fn remote(
862 remote_id: BufferId,
863 replica_id: ReplicaId,
864 capability: Capability,
865 base_text: impl Into<String>,
866 cx: &BackgroundExecutor,
867 ) -> Self {
868 Self::build(
869 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
870 None,
871 capability,
872 )
873 }
874
875 /// Create a new buffer that is a replica of a remote buffer, populating its
876 /// state from the given protobuf message.
877 pub fn from_proto(
878 replica_id: ReplicaId,
879 capability: Capability,
880 message: proto::BufferState,
881 file: Option<Arc<dyn File>>,
882 cx: &BackgroundExecutor,
883 ) -> Result<Self> {
884 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
885 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
886 let mut this = Self::build(buffer, file, capability);
887 this.text.set_line_ending(proto::deserialize_line_ending(
888 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
889 ));
890 this.saved_version = proto::deserialize_version(&message.saved_version);
891 this.saved_mtime = message.saved_mtime.map(|time| time.into());
892 Ok(this)
893 }
894
895 /// Serialize the buffer's state to a protobuf message.
896 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
897 proto::BufferState {
898 id: self.remote_id().into(),
899 file: self.file.as_ref().map(|f| f.to_proto(cx)),
900 base_text: self.base_text().to_string(),
901 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
902 saved_version: proto::serialize_version(&self.saved_version),
903 saved_mtime: self.saved_mtime.map(|time| time.into()),
904 }
905 }
906
907 /// Serialize as protobufs all of the changes to the buffer since the given version.
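    ///
    /// A minimal sketch of syncing a remote replica (transport details assumed):
    ///
    /// ```ignore
    /// let ops = buffer.read(cx).serialize_ops(Some(known_version), cx);
    /// cx.background_spawn(async move {
    ///     for operation in ops.await {
    ///         // send `operation` to the remote peer
    ///     }
    /// })
    /// .detach();
    /// ```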
908 pub fn serialize_ops(
909 &self,
910 since: Option<clock::Global>,
911 cx: &App,
912 ) -> Task<Vec<proto::Operation>> {
913 let mut operations = Vec::new();
914 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
915
916 operations.extend(self.remote_selections.iter().map(|(_, set)| {
917 proto::serialize_operation(&Operation::UpdateSelections {
918 selections: set.selections.clone(),
919 lamport_timestamp: set.lamport_timestamp,
920 line_mode: set.line_mode,
921 cursor_shape: set.cursor_shape,
922 })
923 }));
924
925 for (server_id, diagnostics) in &self.diagnostics {
926 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
927 lamport_timestamp: self.diagnostics_timestamp,
928 server_id: *server_id,
929 diagnostics: diagnostics.iter().cloned().collect(),
930 }));
931 }
932
933 for (server_id, completions) in &self.completion_triggers_per_language_server {
934 operations.push(proto::serialize_operation(
935 &Operation::UpdateCompletionTriggers {
936 triggers: completions.iter().cloned().collect(),
937 lamport_timestamp: self.completion_triggers_timestamp,
938 server_id: *server_id,
939 },
940 ));
941 }
942
943 let text_operations = self.text.operations().clone();
944 cx.background_spawn(async move {
945 let since = since.unwrap_or_default();
946 operations.extend(
947 text_operations
948 .iter()
949 .filter(|(_, op)| !since.observed(op.timestamp()))
950 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
951 );
952 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
953 operations
954 })
955 }
956
957 /// Assign a language to the buffer, returning the buffer.
958 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
959 self.set_language(Some(language), cx);
960 self
961 }
962
963 /// Returns the [`Capability`] of this buffer.
964 pub fn capability(&self) -> Capability {
965 self.capability
966 }
967
968 /// Whether this buffer can only be read.
969 pub fn read_only(&self) -> bool {
970 self.capability == Capability::ReadOnly
971 }
972
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
974 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
975 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
976 let snapshot = buffer.snapshot();
977 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
978 Self {
979 saved_mtime,
980 saved_version: buffer.version(),
981 preview_version: buffer.version(),
982 reload_task: None,
983 transaction_depth: 0,
984 was_dirty_before_starting_transaction: None,
985 has_unsaved_edits: Cell::new((buffer.version(), false)),
986 text: buffer,
987 branch_state: None,
988 file,
989 capability,
990 syntax_map,
991 reparse: None,
992 non_text_state_update_count: 0,
993 sync_parse_timeout: Duration::from_millis(1),
994 parse_status: watch::channel(ParseStatus::Idle),
995 autoindent_requests: Default::default(),
996 wait_for_autoindent_txs: Default::default(),
997 pending_autoindent: Default::default(),
998 language: None,
999 remote_selections: Default::default(),
1000 diagnostics: Default::default(),
1001 diagnostics_timestamp: Lamport::MIN,
1002 completion_triggers: Default::default(),
1003 completion_triggers_per_language_server: Default::default(),
1004 completion_triggers_timestamp: Lamport::MIN,
1005 deferred_ops: OperationQueue::new(),
1006 has_conflict: false,
1007 change_bits: Default::default(),
1008 _subscriptions: Vec::new(),
1009 }
1010 }
1011
1012 pub fn build_snapshot(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> impl Future<Output = BufferSnapshot> + use<> {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 async move {
1021 let text =
1022 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1023 .snapshot();
1024 let mut syntax = SyntaxMap::new(&text).snapshot();
1025 if let Some(language) = language.clone() {
1026 let language_registry = language_registry.clone();
1027 syntax.reparse(&text, language_registry, language);
1028 }
1029 BufferSnapshot {
1030 text,
1031 syntax,
1032 file: None,
1033 diagnostics: Default::default(),
1034 remote_selections: Default::default(),
1035 language,
1036 non_text_state_update_count: 0,
1037 }
1038 }
1039 }
1040
1041 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1042 let entity_id = cx.reserve_entity::<Self>().entity_id();
1043 let buffer_id = entity_id.as_non_zero_u64().into();
1044 let text = TextBuffer::new_normalized(
1045 ReplicaId::LOCAL,
1046 buffer_id,
1047 Default::default(),
1048 Rope::new(),
1049 )
1050 .snapshot();
1051 let syntax = SyntaxMap::new(&text).snapshot();
1052 BufferSnapshot {
1053 text,
1054 syntax,
1055 file: None,
1056 diagnostics: Default::default(),
1057 remote_selections: Default::default(),
1058 language: None,
1059 non_text_state_update_count: 0,
1060 }
1061 }
1062
1063 #[cfg(any(test, feature = "test-support"))]
1064 pub fn build_snapshot_sync(
1065 text: Rope,
1066 language: Option<Arc<Language>>,
1067 language_registry: Option<Arc<LanguageRegistry>>,
1068 cx: &mut App,
1069 ) -> BufferSnapshot {
1070 let entity_id = cx.reserve_entity::<Self>().entity_id();
1071 let buffer_id = entity_id.as_non_zero_u64().into();
1072 let text =
1073 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1074 .snapshot();
1075 let mut syntax = SyntaxMap::new(&text).snapshot();
1076 if let Some(language) = language.clone() {
1077 syntax.reparse(&text, language_registry, language);
1078 }
1079 BufferSnapshot {
1080 text,
1081 syntax,
1082 file: None,
1083 diagnostics: Default::default(),
1084 remote_selections: Default::default(),
1085 language,
1086 non_text_state_update_count: 0,
1087 }
1088 }
1089
1090 /// Retrieve a snapshot of the buffer's current state. This is computationally
1091 /// cheap, and allows reading from the buffer on a background thread.
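    ///
    /// A minimal sketch of reading on a background thread (task wiring assumed):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let text = snapshot.text();
    ///     // ... analyze `text` without blocking the main thread ...
    /// })
    /// .detach();
    /// ```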
1092 pub fn snapshot(&self) -> BufferSnapshot {
1093 let text = self.text.snapshot();
1094 let mut syntax_map = self.syntax_map.lock();
1095 syntax_map.interpolate(&text);
1096 let syntax = syntax_map.snapshot();
1097
1098 BufferSnapshot {
1099 text,
1100 syntax,
1101 file: self.file.clone(),
1102 remote_selections: self.remote_selections.clone(),
1103 diagnostics: self.diagnostics.clone(),
1104 language: self.language.clone(),
1105 non_text_state_update_count: self.non_text_state_update_count,
1106 }
1107 }
1108
1109 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1110 let this = cx.entity();
1111 cx.new(|cx| {
1112 let mut branch = Self {
1113 branch_state: Some(BufferBranchState {
1114 base_buffer: this.clone(),
1115 merged_operations: Default::default(),
1116 }),
1117 language: self.language.clone(),
1118 has_conflict: self.has_conflict,
1119 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1120 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1121 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1122 };
1123 if let Some(language_registry) = self.language_registry() {
1124 branch.set_language_registry(language_registry);
1125 }
1126
1127 // Reparse the branch buffer so that we get syntax highlighting immediately.
1128 branch.reparse(cx);
1129
1130 branch
1131 })
1132 }
1133
1134 pub fn preview_edits(
1135 &self,
1136 edits: Arc<[(Range<Anchor>, String)]>,
1137 cx: &App,
1138 ) -> Task<EditPreview> {
1139 let registry = self.language_registry();
1140 let language = self.language().cloned();
1141 let old_snapshot = self.text.snapshot();
1142 let mut branch_buffer = self.text.branch();
1143 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1144 let executor = cx.background_executor().clone();
1145 cx.background_spawn(async move {
1146 if !edits.is_empty() {
1147 if let Some(language) = language.clone() {
1148 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1149 }
1150
1151 branch_buffer.edit(edits.iter().cloned(), &executor);
1152 let snapshot = branch_buffer.snapshot();
1153 syntax_snapshot.interpolate(&snapshot);
1154
1155 if let Some(language) = language {
1156 syntax_snapshot.reparse(&snapshot, registry, language);
1157 }
1158 }
1159 EditPreview {
1160 old_snapshot,
1161 applied_edits_snapshot: branch_buffer.snapshot(),
1162 syntax_snapshot,
1163 }
1164 })
1165 }
1166
1167 /// Applies all of the changes in this buffer that intersect any of the
1168 /// given `ranges` to its base buffer.
1169 ///
1170 /// If `ranges` is empty, then all changes will be applied. This buffer must
1171 /// be a branch buffer to call this method.
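    ///
    /// A minimal sketch of the branch/merge round trip (entity handles assumed):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     branch.merge_into_base(Vec::new(), cx); // empty ranges: apply everything
    /// });
    /// ```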
1172 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1173 let Some(base_buffer) = self.base_buffer() else {
1174 debug_panic!("not a branch buffer");
1175 return;
1176 };
1177
1178 let mut ranges = if ranges.is_empty() {
1179 &[0..usize::MAX]
1180 } else {
1181 ranges.as_slice()
1182 }
1183 .iter()
1184 .peekable();
1185
1186 let mut edits = Vec::new();
1187 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1188 let mut is_included = false;
1189 while let Some(range) = ranges.peek() {
1190 if range.end < edit.new.start {
1191 ranges.next().unwrap();
1192 } else {
1193 if range.start <= edit.new.end {
1194 is_included = true;
1195 }
1196 break;
1197 }
1198 }
1199
1200 if is_included {
1201 edits.push((
1202 edit.old.clone(),
1203 self.text_for_range(edit.new.clone()).collect::<String>(),
1204 ));
1205 }
1206 }
1207
1208 let operation = base_buffer.update(cx, |base_buffer, cx| {
1209 // cx.emit(BufferEvent::DiffBaseChanged);
1210 base_buffer.edit(edits, None, cx)
1211 });
1212
1213 if let Some(operation) = operation
1214 && let Some(BufferBranchState {
1215 merged_operations, ..
1216 }) = &mut self.branch_state
1217 {
1218 merged_operations.push(operation);
1219 }
1220 }
1221
1222 fn on_base_buffer_event(
1223 &mut self,
1224 _: Entity<Buffer>,
1225 event: &BufferEvent,
1226 cx: &mut Context<Self>,
1227 ) {
1228 let BufferEvent::Operation { operation, .. } = event else {
1229 return;
1230 };
1231 let Some(BufferBranchState {
1232 merged_operations, ..
1233 }) = &mut self.branch_state
1234 else {
1235 return;
1236 };
1237
1238 let mut operation_to_undo = None;
1239 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1240 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1241 {
1242 merged_operations.remove(ix);
1243 operation_to_undo = Some(operation.timestamp);
1244 }
1245
1246 self.apply_ops([operation.clone()], cx);
1247
1248 if let Some(timestamp) = operation_to_undo {
1249 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1250 self.undo_operations(counts, cx);
1251 }
1252 }
1253
1254 #[cfg(test)]
1255 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1256 &self.text
1257 }
1258
1259 /// Retrieve a snapshot of the buffer's raw text, without any
1260 /// language-related state like the syntax tree or diagnostics.
1261 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1262 self.text.snapshot()
1263 }
1264
1265 /// The file associated with the buffer, if any.
1266 pub fn file(&self) -> Option<&Arc<dyn File>> {
1267 self.file.as_ref()
1268 }
1269
1270 /// The version of the buffer that was last saved or reloaded from disk.
1271 pub fn saved_version(&self) -> &clock::Global {
1272 &self.saved_version
1273 }
1274
1275 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1276 pub fn saved_mtime(&self) -> Option<MTime> {
1277 self.saved_mtime
1278 }
1279
1280 /// Assign a language to the buffer.
1281 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1282 self.non_text_state_update_count += 1;
1283 self.syntax_map.lock().clear(&self.text);
1284 self.language = language;
1285 self.was_changed();
1286 self.reparse(cx);
1287 cx.emit(BufferEvent::LanguageChanged);
1288 }
1289
1290 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1291 /// other languages if parts of the buffer are written in different languages.
1292 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1293 self.syntax_map
1294 .lock()
1295 .set_language_registry(language_registry);
1296 }
1297
1298 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1299 self.syntax_map.lock().language_registry()
1300 }
1301
1302 /// Assign the line ending type to the buffer.
1303 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1304 self.text.set_line_ending(line_ending);
1305
1306 let lamport_timestamp = self.text.lamport_clock.tick();
1307 self.send_operation(
1308 Operation::UpdateLineEnding {
1309 line_ending,
1310 lamport_timestamp,
1311 },
1312 true,
1313 cx,
1314 );
1315 }
1316
1317 /// Assign the buffer a new [`Capability`].
1318 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1319 if self.capability != capability {
1320 self.capability = capability;
1321 cx.emit(BufferEvent::CapabilityChanged)
1322 }
1323 }
1324
1325 /// This method is called to signal that the buffer has been saved.
1326 pub fn did_save(
1327 &mut self,
1328 version: clock::Global,
1329 mtime: Option<MTime>,
1330 cx: &mut Context<Self>,
1331 ) {
1332 self.saved_version = version.clone();
1333 self.has_unsaved_edits.set((version, false));
1334 self.has_conflict = false;
1335 self.saved_mtime = mtime;
1336 self.was_changed();
1337 cx.emit(BufferEvent::Saved);
1338 cx.notify();
1339 }
1340
1341 /// Reloads the contents of the buffer from disk.
1342 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1343 let (tx, rx) = futures::channel::oneshot::channel();
1344 let prev_version = self.text.version();
1345 self.reload_task = Some(cx.spawn(async move |this, cx| {
1346 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1347 let file = this.file.as_ref()?.as_local()?;
1348
1349 Some((file.disk_state().mtime(), file.load(cx)))
1350 })?
1351 else {
1352 return Ok(());
1353 };
1354
1355 let new_text = new_text.await?;
1356 let diff = this
1357 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1358 .await;
1359 this.update(cx, |this, cx| {
1360 if this.version() == diff.base_version {
1361 this.finalize_last_transaction();
1362 this.apply_diff(diff, cx);
1363 tx.send(this.finalize_last_transaction().cloned()).ok();
1364 this.has_conflict = false;
1365 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1366 } else {
1367 if !diff.edits.is_empty()
1368 || this
1369 .edits_since::<usize>(&diff.base_version)
1370 .next()
1371 .is_some()
1372 {
1373 this.has_conflict = true;
1374 }
1375
1376 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1377 }
1378
1379 this.reload_task.take();
1380 })
1381 }));
1382 rx
1383 }
1384
1385 /// This method is called to signal that the buffer has been reloaded.
1386 pub fn did_reload(
1387 &mut self,
1388 version: clock::Global,
1389 line_ending: LineEnding,
1390 mtime: Option<MTime>,
1391 cx: &mut Context<Self>,
1392 ) {
1393 self.saved_version = version;
1394 self.has_unsaved_edits
1395 .set((self.saved_version.clone(), false));
1396 self.text.set_line_ending(line_ending);
1397 self.saved_mtime = mtime;
1398 cx.emit(BufferEvent::Reloaded);
1399 cx.notify();
1400 }
1401
1402 /// Updates the [`File`] backing this buffer. This should be called when
1403 /// the file has changed or has been deleted.
1404 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1405 let was_dirty = self.is_dirty();
1406 let mut file_changed = false;
1407
1408 if let Some(old_file) = self.file.as_ref() {
1409 if new_file.path() != old_file.path() {
1410 file_changed = true;
1411 }
1412
1413 let old_state = old_file.disk_state();
1414 let new_state = new_file.disk_state();
1415 if old_state != new_state {
1416 file_changed = true;
1417 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1418 cx.emit(BufferEvent::ReloadNeeded)
1419 }
1420 }
1421 } else {
1422 file_changed = true;
1423 };
1424
1425 self.file = Some(new_file);
1426 if file_changed {
1427 self.was_changed();
1428 self.non_text_state_update_count += 1;
1429 if was_dirty != self.is_dirty() {
1430 cx.emit(BufferEvent::DirtyChanged);
1431 }
1432 cx.emit(BufferEvent::FileHandleChanged);
1433 cx.notify();
1434 }
1435 }
1436
1437 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1438 Some(self.branch_state.as_ref()?.base_buffer.clone())
1439 }
1440
1441 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1442 pub fn language(&self) -> Option<&Arc<Language>> {
1443 self.language.as_ref()
1444 }
1445
1446 /// Returns the [`Language`] at the given location.
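    ///
    /// A minimal sketch (a buffer containing language injections, such as a
    /// Markdown document with a fenced Rust block, is assumed):
    ///
    /// ```ignore
    /// // Inside the fenced block this resolves to the injected language;
    /// // elsewhere it falls back to the buffer's primary language.
    /// let language = buffer.read(cx).language_at(offset);
    /// ```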
1447 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1448 let offset = position.to_offset(self);
1449 let mut is_first = true;
1450 let start_anchor = self.anchor_before(offset);
1451 let end_anchor = self.anchor_after(offset);
1452 self.syntax_map
1453 .lock()
1454 .layers_for_range(offset..offset, &self.text, false)
1455 .filter(|layer| {
1456 if is_first {
1457 is_first = false;
1458 return true;
1459 }
1460
1461 layer
1462 .included_sub_ranges
1463 .map(|sub_ranges| {
1464 sub_ranges.iter().any(|sub_range| {
1465 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1466 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1467 !is_before_start && !is_after_end
1468 })
1469 })
1470 .unwrap_or(true)
1471 })
1472 .last()
1473 .map(|info| info.language.clone())
1474 .or_else(|| self.language.clone())
1475 }
1476
1477 /// Returns each [`Language`] for the active syntax layers at the given location.
1478 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1479 let offset = position.to_offset(self);
1480 let mut languages: Vec<Arc<Language>> = self
1481 .syntax_map
1482 .lock()
1483 .layers_for_range(offset..offset, &self.text, false)
1484 .map(|info| info.language.clone())
1485 .collect();
1486
1487 if languages.is_empty()
1488 && let Some(buffer_language) = self.language()
1489 {
1490 languages.push(buffer_language.clone());
1491 }
1492
1493 languages
1494 }
1495
1496 /// An integer version number that accounts for all updates besides
1497 /// the buffer's text itself (which is versioned via a version vector).
1498 pub fn non_text_state_update_count(&self) -> usize {
1499 self.non_text_state_update_count
1500 }
1501
1502 /// Whether the buffer is being parsed in the background.
1503 #[cfg(any(test, feature = "test-support"))]
1504 pub fn is_parsing(&self) -> bool {
1505 self.reparse.is_some()
1506 }
1507
1508 /// Indicates whether the buffer contains any regions that may be
1509 /// written in a language that hasn't been loaded yet.
1510 pub fn contains_unknown_injections(&self) -> bool {
1511 self.syntax_map.lock().contains_unknown_injections()
1512 }
1513
1514 #[cfg(any(test, feature = "test-support"))]
1515 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1516 self.sync_parse_timeout = timeout;
1517 }
1518
1519 /// Called after an edit to synchronize the buffer's main parse tree with
1520 /// the buffer's new underlying state.
1521 ///
1522 /// Locks the syntax map and interpolates the edits since the last reparse
1523 /// into the foreground syntax tree.
1524 ///
1525 /// Then takes a stable snapshot of the syntax map before unlocking it.
1526 /// The snapshot with the interpolated edits is sent to a background thread,
1527 /// where we ask Tree-sitter to perform an incremental parse.
1528 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it completes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1542 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1543 if self.reparse.is_some() {
1544 return;
1545 }
1546 let language = if let Some(language) = self.language.clone() {
1547 language
1548 } else {
1549 return;
1550 };
1551
1552 let text = self.text_snapshot();
1553 let parsed_version = self.version();
1554
1555 let mut syntax_map = self.syntax_map.lock();
1556 syntax_map.interpolate(&text);
1557 let language_registry = syntax_map.language_registry();
1558 let mut syntax_snapshot = syntax_map.snapshot();
1559 drop(syntax_map);
1560
1561 let parse_task = cx.background_spawn({
1562 let language = language.clone();
1563 let language_registry = language_registry.clone();
1564 async move {
1565 syntax_snapshot.reparse(&text, language_registry, language);
1566 syntax_snapshot
1567 }
1568 });
1569
1570 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1571 match cx
1572 .background_executor()
1573 .block_with_timeout(self.sync_parse_timeout, parse_task)
1574 {
1575 Ok(new_syntax_snapshot) => {
1576 self.did_finish_parsing(new_syntax_snapshot, cx);
1577 self.reparse = None;
1578 }
1579 Err(parse_task) => {
1580 // todo(lw): hot foreground spawn
1581 self.reparse = Some(cx.spawn(async move |this, cx| {
1582 let new_syntax_map = cx.background_spawn(parse_task).await;
1583 this.update(cx, move |this, cx| {
1584 let grammar_changed = || {
1585 this.language.as_ref().is_none_or(|current_language| {
1586 !Arc::ptr_eq(&language, current_language)
1587 })
1588 };
1589 let language_registry_changed = || {
1590 new_syntax_map.contains_unknown_injections()
1591 && language_registry.is_some_and(|registry| {
1592 registry.version() != new_syntax_map.language_registry_version()
1593 })
1594 };
1595 let parse_again = this.version.changed_since(&parsed_version)
1596 || language_registry_changed()
1597 || grammar_changed();
1598 this.did_finish_parsing(new_syntax_map, cx);
1599 this.reparse = None;
1600 if parse_again {
1601 this.reparse(cx);
1602 }
1603 })
1604 .ok();
1605 }));
1606 }
1607 }
1608 }
1609
1610 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1611 self.was_changed();
1612 self.non_text_state_update_count += 1;
1613 self.syntax_map.lock().did_parse(syntax_snapshot);
1614 self.request_autoindent(cx);
1615 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1616 cx.emit(BufferEvent::Reparsed);
1617 cx.notify();
1618 }
1619
1620 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1621 self.parse_status.1.clone()
1622 }
1623
1624 /// Assign to the buffer a set of diagnostics created by a given language server.
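    ///
    /// A minimal sketch (assumes `DiagnosticSet::new` builds a set from
    /// `DiagnosticEntry<Point>` values and that `Diagnostic` implements `Default`):
    ///
    /// ```ignore
    /// let set = DiagnosticSet::new(
    ///     [DiagnosticEntry {
    ///         range: Point::new(0, 0)..Point::new(0, 5),
    ///         diagnostic: Diagnostic {
    ///             severity: DiagnosticSeverity::WARNING,
    ///             message: "unused variable".into(),
    ///             ..Default::default()
    ///         },
    ///     }],
    ///     &buffer.read(cx).snapshot(),
    /// );
    /// buffer.update(cx, |buffer, cx| buffer.update_diagnostics(server_id, set, cx));
    /// ```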
1625 pub fn update_diagnostics(
1626 &mut self,
1627 server_id: LanguageServerId,
1628 diagnostics: DiagnosticSet,
1629 cx: &mut Context<Self>,
1630 ) {
1631 let lamport_timestamp = self.text.lamport_clock.tick();
1632 let op = Operation::UpdateDiagnostics {
1633 server_id,
1634 diagnostics: diagnostics.iter().cloned().collect(),
1635 lamport_timestamp,
1636 };
1637
1638 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1639 self.send_operation(op, true, cx);
1640 }
1641
1642 pub fn buffer_diagnostics(
1643 &self,
1644 for_server: Option<LanguageServerId>,
1645 ) -> Vec<&DiagnosticEntry<Anchor>> {
1646 match for_server {
1647 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1648 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1649 Err(_) => Vec::new(),
1650 },
1651 None => self
1652 .diagnostics
1653 .iter()
1654 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1655 .collect(),
1656 }
1657 }
1658
1659 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1660 if let Some(indent_sizes) = self.compute_autoindents() {
1661 let indent_sizes = cx.background_spawn(indent_sizes);
1662 match cx
1663 .background_executor()
1664 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1665 {
1666 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1667 Err(indent_sizes) => {
1668 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1669 let indent_sizes = indent_sizes.await;
1670 this.update(cx, |this, cx| {
1671 this.apply_autoindents(indent_sizes, cx);
1672 })
1673 .ok();
1674 }));
1675 }
1676 }
1677 } else {
1678 self.autoindent_requests.clear();
1679 for tx in self.wait_for_autoindent_txs.drain(..) {
1680 tx.send(()).ok();
1681 }
1682 }
1683 }
1684
1685 fn compute_autoindents(
1686 &self,
1687 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1688 let max_rows_between_yields = 100;
1689 let snapshot = self.snapshot();
1690 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1691 return None;
1692 }
1693
1694 let autoindent_requests = self.autoindent_requests.clone();
1695 Some(async move {
1696 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1697 for request in autoindent_requests {
1698 // Resolve each edited range to its row in the current buffer and in the
1699 // buffer before this batch of edits.
1700 let mut row_ranges = Vec::new();
1701 let mut old_to_new_rows = BTreeMap::new();
1702 let mut language_indent_sizes_by_new_row = Vec::new();
1703 for entry in &request.entries {
1704 let position = entry.range.start;
1705 let new_row = position.to_point(&snapshot).row;
1706 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1707 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1708
1709 if !entry.first_line_is_new {
1710 let old_row = position.to_point(&request.before_edit).row;
1711 old_to_new_rows.insert(old_row, new_row);
1712 }
1713 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1714 }
1715
1716 // Build a map containing the suggested indentation for each of the edited lines
1717 // with respect to the state of the buffer before these edits. This map is keyed
1718 // by the rows for these lines in the current state of the buffer.
1719 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1720 let old_edited_ranges =
1721 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1722 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1723 let mut language_indent_size = IndentSize::default();
1724 for old_edited_range in old_edited_ranges {
1725 let suggestions = request
1726 .before_edit
1727 .suggest_autoindents(old_edited_range.clone())
1728 .into_iter()
1729 .flatten();
1730 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1731 if let Some(suggestion) = suggestion {
1732 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1733
1734 // Find the indent size based on the language for this row.
1735 while let Some((row, size)) = language_indent_sizes.peek() {
1736 if *row > new_row {
1737 break;
1738 }
1739 language_indent_size = *size;
1740 language_indent_sizes.next();
1741 }
1742
1743 let suggested_indent = old_to_new_rows
1744 .get(&suggestion.basis_row)
1745 .and_then(|from_row| {
1746 Some(old_suggestions.get(from_row).copied()?.0)
1747 })
1748 .unwrap_or_else(|| {
1749 request
1750 .before_edit
1751 .indent_size_for_line(suggestion.basis_row)
1752 })
1753 .with_delta(suggestion.delta, language_indent_size);
1754 old_suggestions
1755 .insert(new_row, (suggested_indent, suggestion.within_error));
1756 }
1757 }
1758 yield_now().await;
1759 }
1760
1761 // Compute new suggestions for each line, but only include them in the result
1762 // if they differ from the old suggestion for that line.
1763 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1764 let mut language_indent_size = IndentSize::default();
1765 for (row_range, original_indent_column) in row_ranges {
1766 let new_edited_row_range = if request.is_block_mode {
1767 row_range.start..row_range.start + 1
1768 } else {
1769 row_range.clone()
1770 };
1771
1772 let suggestions = snapshot
1773 .suggest_autoindents(new_edited_row_range.clone())
1774 .into_iter()
1775 .flatten();
1776 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1777 if let Some(suggestion) = suggestion {
1778 // Find the indent size based on the language for this row.
1779 while let Some((row, size)) = language_indent_sizes.peek() {
1780 if *row > new_row {
1781 break;
1782 }
1783 language_indent_size = *size;
1784 language_indent_sizes.next();
1785 }
1786
1787 let suggested_indent = indent_sizes
1788 .get(&suggestion.basis_row)
1789 .copied()
1790 .map(|e| e.0)
1791 .unwrap_or_else(|| {
1792 snapshot.indent_size_for_line(suggestion.basis_row)
1793 })
1794 .with_delta(suggestion.delta, language_indent_size);
1795
1796 if old_suggestions.get(&new_row).is_none_or(
1797 |(old_indentation, was_within_error)| {
1798 suggested_indent != *old_indentation
1799 && (!suggestion.within_error || *was_within_error)
1800 },
1801 ) {
1802 indent_sizes.insert(
1803 new_row,
1804 (suggested_indent, request.ignore_empty_lines),
1805 );
1806 }
1807 }
1808 }
1809
1810 if let (true, Some(original_indent_column)) =
1811 (request.is_block_mode, original_indent_column)
1812 {
1813 let new_indent =
1814 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1815 *indent
1816 } else {
1817 snapshot.indent_size_for_line(row_range.start)
1818 };
1819 let delta = new_indent.len as i64 - original_indent_column as i64;
1820 if delta != 0 {
1821 for row in row_range.skip(1) {
1822 indent_sizes.entry(row).or_insert_with(|| {
1823 let mut size = snapshot.indent_size_for_line(row);
1824 if size.kind == new_indent.kind {
1825 match delta.cmp(&0) {
1826 Ordering::Greater => size.len += delta as u32,
1827 Ordering::Less => {
1828 size.len = size.len.saturating_sub(-delta as u32)
1829 }
1830 Ordering::Equal => {}
1831 }
1832 }
1833 (size, request.ignore_empty_lines)
1834 });
1835 }
1836 }
1837 }
1838
1839 yield_now().await;
1840 }
1841 }
1842
1843 indent_sizes
1844 .into_iter()
1845 .filter_map(|(row, (indent, ignore_empty_lines))| {
1846 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1847 None
1848 } else {
1849 Some((row, indent))
1850 }
1851 })
1852 .collect()
1853 })
1854 }
1855
1856 fn apply_autoindents(
1857 &mut self,
1858 indent_sizes: BTreeMap<u32, IndentSize>,
1859 cx: &mut Context<Self>,
1860 ) {
1861 self.autoindent_requests.clear();
1862 for tx in self.wait_for_autoindent_txs.drain(..) {
1863 tx.send(()).ok();
1864 }
1865
1866 let edits: Vec<_> = indent_sizes
1867 .into_iter()
1868 .filter_map(|(row, indent_size)| {
1869 let current_size = indent_size_for_line(self, row);
1870 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1871 })
1872 .collect();
1873
1874 let preserve_preview = self.preserve_preview();
1875 self.edit(edits, None, cx);
1876 if preserve_preview {
1877 self.refresh_preview();
1878 }
1879 }
1880
1881 /// Create a minimal edit that will cause the given row to be indented
1882 /// with the given size. After applying this edit, the length of the line
1883 /// will always be at least `new_size.len`.
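    ///
    /// # Example
    ///
    /// A minimal sketch of growing a two-space indent to four spaces, using the
    /// `IndentSize::spaces` constructor as it appears elsewhere in this crate.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,                     // row
    ///     IndentSize::spaces(2), // current indentation
    ///     IndentSize::spaces(4), // desired indentation
    /// );
    /// // Two spaces are inserted at the start of row 3.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```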
1884 pub fn edit_for_indent_size_adjustment(
1885 row: u32,
1886 current_size: IndentSize,
1887 new_size: IndentSize,
1888 ) -> Option<(Range<Point>, String)> {
1889 if new_size.kind == current_size.kind {
1890            match new_size.len.cmp(&current_size.len) {
1891 Ordering::Greater => {
1892 let point = Point::new(row, 0);
1893 Some((
1894 point..point,
1895 iter::repeat(new_size.char())
1896 .take((new_size.len - current_size.len) as usize)
1897 .collect::<String>(),
1898 ))
1899 }
1900
1901 Ordering::Less => Some((
1902 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1903 String::new(),
1904 )),
1905
1906 Ordering::Equal => None,
1907 }
1908 } else {
1909 Some((
1910 Point::new(row, 0)..Point::new(row, current_size.len),
1911 iter::repeat(new_size.char())
1912 .take(new_size.len as usize)
1913 .collect::<String>(),
1914 ))
1915 }
1916 }
1917
1918 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1919 /// and the given new text.
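    ///
    /// # Example
    ///
    /// A sketch of computing a diff against replacement text and applying it once the
    /// task resolves; the surrounding entity-update and async plumbing is elided.
    ///
    /// ```ignore
    /// // Inside an update of the buffer entity:
    /// let diff_task = buffer.diff(new_text, cx);
    /// // ...in a spawned task:
    /// let diff = diff_task.await;
    /// // ...back inside an update; hunks that conflict with edits made since the
    /// // diff was computed are discarded.
    /// buffer.apply_diff(diff, cx);
    /// ```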
1920 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1921 let old_text = self.as_rope().clone();
1922 let base_version = self.version();
1923 cx.background_executor()
1924 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1925 let old_text = old_text.to_string();
1926 let line_ending = LineEnding::detect(&new_text);
1927 LineEnding::normalize(&mut new_text);
1928 let edits = text_diff(&old_text, &new_text);
1929 Diff {
1930 base_version,
1931 line_ending,
1932 edits,
1933 }
1934 })
1935 }
1936
1937 /// Spawns a background task that searches the buffer for any whitespace
1938    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
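    ///
    /// # Example
    ///
    /// A sketch of stripping trailing whitespace as part of a save routine; like
    /// [`Buffer::diff`], the returned task is awaited and the result applied with
    /// [`Buffer::apply_diff`] (entity-update plumbing elided).
    ///
    /// ```ignore
    /// let whitespace_task = buffer.remove_trailing_whitespace(cx);
    /// // ...in a spawned task:
    /// let diff = whitespace_task.await;
    /// // ...back inside an update:
    /// buffer.apply_diff(diff, cx);
    /// ```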
1939 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1940 let old_text = self.as_rope().clone();
1941 let line_ending = self.line_ending();
1942 let base_version = self.version();
1943 cx.background_spawn(async move {
1944 let ranges = trailing_whitespace_ranges(&old_text);
1945 let empty = Arc::<str>::from("");
1946 Diff {
1947 base_version,
1948 line_ending,
1949 edits: ranges
1950 .into_iter()
1951 .map(|range| (range, empty.clone()))
1952 .collect(),
1953 }
1954 })
1955 }
1956
1957 /// Ensures that the buffer ends with a single newline character, and
1958 /// no other whitespace. Skips if the buffer is empty.
1959 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1960 let len = self.len();
1961 if len == 0 {
1962 return;
1963 }
1964 let mut offset = len;
1965 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1966 let non_whitespace_len = chunk
1967 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1968 .len();
1969 offset -= chunk.len();
1970 offset += non_whitespace_len;
1971 if non_whitespace_len != 0 {
1972 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1973 return;
1974 }
1975 break;
1976 }
1977 }
1978 self.edit([(offset..len, "\n")], None, cx);
1979 }
1980
1981 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1982 /// calculated, then adjust the diff to account for those changes, and discard any
1983 /// parts of the diff that conflict with those changes.
1984 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1985 let snapshot = self.snapshot();
1986 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1987 let mut delta = 0;
1988 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1989 while let Some(edit_since) = edits_since.peek() {
1990 // If the edit occurs after a diff hunk, then it does not
1991 // affect that hunk.
1992 if edit_since.old.start > range.end {
1993 break;
1994 }
1995 // If the edit precedes the diff hunk, then adjust the hunk
1996 // to reflect the edit.
1997 else if edit_since.old.end < range.start {
1998 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1999 edits_since.next();
2000 }
2001 // If the edit intersects a diff hunk, then discard that hunk.
2002 else {
2003 return None;
2004 }
2005 }
2006
2007 let start = (range.start as i64 + delta) as usize;
2008 let end = (range.end as i64 + delta) as usize;
2009 Some((start..end, new_text))
2010 });
2011
2012 self.start_transaction();
2013 self.text.set_line_ending(diff.line_ending);
2014 self.edit(adjusted_edits, None, cx);
2015 self.end_transaction(cx)
2016 }
2017
2018 pub fn has_unsaved_edits(&self) -> bool {
2019 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2020
2021 if last_version == self.version {
2022 self.has_unsaved_edits
2023 .set((last_version, has_unsaved_edits));
2024 return has_unsaved_edits;
2025 }
2026
2027 let has_edits = self.has_edits_since(&self.saved_version);
2028 self.has_unsaved_edits
2029 .set((self.version.clone(), has_edits));
2030 has_edits
2031 }
2032
2033 /// Checks if the buffer has unsaved changes.
2034 pub fn is_dirty(&self) -> bool {
2035 if self.capability == Capability::ReadOnly {
2036 return false;
2037 }
2038 if self.has_conflict {
2039 return true;
2040 }
2041 match self.file.as_ref().map(|f| f.disk_state()) {
2042 Some(DiskState::New) | Some(DiskState::Deleted) => {
2043 !self.is_empty() && self.has_unsaved_edits()
2044 }
2045 _ => self.has_unsaved_edits(),
2046 }
2047 }
2048
2049 /// Checks if the buffer and its file have both changed since the buffer
2050 /// was last saved or reloaded.
2051 pub fn has_conflict(&self) -> bool {
2052 if self.has_conflict {
2053 return true;
2054 }
2055 let Some(file) = self.file.as_ref() else {
2056 return false;
2057 };
2058 match file.disk_state() {
2059 DiskState::New => false,
2060 DiskState::Present { mtime } => match self.saved_mtime {
2061 Some(saved_mtime) => {
2062 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2063 }
2064 None => true,
2065 },
2066 DiskState::Deleted => false,
2067 }
2068 }
2069
2070 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2071 pub fn subscribe(&mut self) -> Subscription {
2072 self.text.subscribe()
2073 }
2074
2075 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2076 ///
2077 /// This allows downstream code to check if the buffer's text has changed without
2078    /// waiting for an effect cycle, which would be required if using events.
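    ///
    /// # Example
    ///
    /// A sketch of polling for changes without subscribing to events; `changed` is a
    /// local flag, not part of the buffer API.
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get());
    /// ```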
2079 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2080 if let Err(ix) = self
2081 .change_bits
2082 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2083 {
2084 self.change_bits.insert(ix, bit);
2085 }
2086 }
2087
2088 /// Set the change bit for all "listeners".
2089 fn was_changed(&mut self) {
2090 self.change_bits.retain(|change_bit| {
2091 change_bit
2092 .upgrade()
2093 .inspect(|bit| {
2094 _ = bit.replace(true);
2095 })
2096 .is_some()
2097 });
2098 }
2099
2100 /// Starts a transaction, if one is not already in-progress. When undoing or
2101 /// redoing edits, all of the edits performed within a transaction are undone
2102 /// or redone together.
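    ///
    /// # Example
    ///
    /// A sketch of grouping two edits so that a single undo reverts both; it assumes
    /// a `cx: &mut Context<Buffer>`.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```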
2103 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2104 self.start_transaction_at(Instant::now())
2105 }
2106
2107 /// Starts a transaction, providing the current time. Subsequent transactions
2108 /// that occur within a short period of time will be grouped together. This
2109 /// is controlled by the buffer's undo grouping duration.
2110 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2111 self.transaction_depth += 1;
2112 if self.was_dirty_before_starting_transaction.is_none() {
2113 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2114 }
2115 self.text.start_transaction_at(now)
2116 }
2117
2118 /// Terminates the current transaction, if this is the outermost transaction.
2119 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2120 self.end_transaction_at(Instant::now(), cx)
2121 }
2122
2123 /// Terminates the current transaction, providing the current time. Subsequent transactions
2124 /// that occur within a short period of time will be grouped together. This
2125 /// is controlled by the buffer's undo grouping duration.
2126 pub fn end_transaction_at(
2127 &mut self,
2128 now: Instant,
2129 cx: &mut Context<Self>,
2130 ) -> Option<TransactionId> {
2131 assert!(self.transaction_depth > 0);
2132 self.transaction_depth -= 1;
2133 let was_dirty = if self.transaction_depth == 0 {
2134 self.was_dirty_before_starting_transaction.take().unwrap()
2135 } else {
2136 false
2137 };
2138 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2139 self.did_edit(&start_version, was_dirty, cx);
2140 Some(transaction_id)
2141 } else {
2142 None
2143 }
2144 }
2145
2146 /// Manually add a transaction to the buffer's undo history.
2147 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2148 self.text.push_transaction(transaction, now);
2149 }
2150
2151 /// Differs from `push_transaction` in that it does not clear the redo
2152 /// stack. Intended to be used to create a parent transaction to merge
2153 /// potential child transactions into.
2154 ///
2155 /// The caller is responsible for removing it from the undo history using
2156 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2157 /// are merged into this transaction, the caller is responsible for ensuring
2158 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2159 /// cleared is to create transactions with the usual `start_transaction` and
2160    /// `end_transaction` methods and merge the resulting transactions into
2161    /// the transaction created by this method.
2162 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2163 self.text.push_empty_transaction(now)
2164 }
2165
2166 /// Prevent the last transaction from being grouped with any subsequent transactions,
2167    /// even if they occur within the buffer's undo grouping duration.
2168 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2169 self.text.finalize_last_transaction()
2170 }
2171
2172 /// Manually group all changes since a given transaction.
2173 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2174 self.text.group_until_transaction(transaction_id);
2175 }
2176
2177    /// Manually remove a transaction from the buffer's undo history.
2178 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2179 self.text.forget_transaction(transaction_id)
2180 }
2181
2182    /// Retrieve a transaction from the buffer's undo history.
2183 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2184 self.text.get_transaction(transaction_id)
2185 }
2186
2187 /// Manually merge two transactions in the buffer's undo history.
2188 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2189 self.text.merge_transactions(transaction, destination);
2190 }
2191
2192 /// Waits for the buffer to receive operations with the given timestamps.
2193 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2194 &mut self,
2195 edit_ids: It,
2196 ) -> impl Future<Output = Result<()>> + use<It> {
2197 self.text.wait_for_edits(edit_ids)
2198 }
2199
2200 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2201 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2202 &mut self,
2203 anchors: It,
2204 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2205 self.text.wait_for_anchors(anchors)
2206 }
2207
2208 /// Waits for the buffer to receive operations up to the given version.
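    ///
    /// # Example
    ///
    /// A sketch of waiting for a remote peer's edits, identified only by their version
    /// vector, before reading the buffer; `remote_version` is an illustrative name.
    ///
    /// ```ignore
    /// let wait = buffer.wait_for_version(remote_version);
    /// // ...in a spawned task:
    /// wait.await?;
    /// ```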
2209 pub fn wait_for_version(
2210 &mut self,
2211 version: clock::Global,
2212 ) -> impl Future<Output = Result<()>> + use<> {
2213 self.text.wait_for_version(version)
2214 }
2215
2216 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2217    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2218 pub fn give_up_waiting(&mut self) {
2219 self.text.give_up_waiting();
2220 }
2221
2222 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2223 let mut rx = None;
2224 if !self.autoindent_requests.is_empty() {
2225 let channel = oneshot::channel();
2226 self.wait_for_autoindent_txs.push(channel.0);
2227 rx = Some(channel.1);
2228 }
2229 rx
2230 }
2231
2232    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2233 pub fn set_active_selections(
2234 &mut self,
2235 selections: Arc<[Selection<Anchor>]>,
2236 line_mode: bool,
2237 cursor_shape: CursorShape,
2238 cx: &mut Context<Self>,
2239 ) {
2240 let lamport_timestamp = self.text.lamport_clock.tick();
2241 self.remote_selections.insert(
2242 self.text.replica_id(),
2243 SelectionSet {
2244 selections: selections.clone(),
2245 lamport_timestamp,
2246 line_mode,
2247 cursor_shape,
2248 },
2249 );
2250 self.send_operation(
2251 Operation::UpdateSelections {
2252 selections,
2253 line_mode,
2254 lamport_timestamp,
2255 cursor_shape,
2256 },
2257 true,
2258 cx,
2259 );
2260 self.non_text_state_update_count += 1;
2261 cx.notify();
2262 }
2263
2264 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2265 /// this replica.
2266 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2267 if self
2268 .remote_selections
2269 .get(&self.text.replica_id())
2270 .is_none_or(|set| !set.selections.is_empty())
2271 {
2272 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2273 }
2274 }
2275
2276 pub fn set_agent_selections(
2277 &mut self,
2278 selections: Arc<[Selection<Anchor>]>,
2279 line_mode: bool,
2280 cursor_shape: CursorShape,
2281 cx: &mut Context<Self>,
2282 ) {
2283 let lamport_timestamp = self.text.lamport_clock.tick();
2284 self.remote_selections.insert(
2285 ReplicaId::AGENT,
2286 SelectionSet {
2287 selections,
2288 lamport_timestamp,
2289 line_mode,
2290 cursor_shape,
2291 },
2292 );
2293 self.non_text_state_update_count += 1;
2294 cx.notify();
2295 }
2296
2297 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2298 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2299 }
2300
2301 /// Replaces the buffer's entire text.
2302 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2303 where
2304 T: Into<Arc<str>>,
2305 {
2306 self.autoindent_requests.clear();
2307 self.edit([(0..self.len(), text)], None, cx)
2308 }
2309
2310 /// Appends the given text to the end of the buffer.
2311 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2312 where
2313 T: Into<Arc<str>>,
2314 {
2315 self.edit([(self.len()..self.len(), text)], None, cx)
2316 }
2317
2318 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2319 /// delete, and a string of text to insert at that location.
2320 ///
2321 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2322 /// request for the edited ranges, which will be processed when the buffer finishes
2323 /// parsing.
2324 ///
2325 /// Parsing takes place at the end of a transaction, and may compute synchronously
2326 /// or asynchronously, depending on the changes.
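    ///
    /// # Example
    ///
    /// A sketch of two edits: a plain replacement, and an insertion that queues an
    /// auto-indent request for the affected lines (offsets are illustrative).
    ///
    /// ```ignore
    /// // Replace the first three bytes without auto-indenting.
    /// buffer.edit([(0..3, "let")], None, cx);
    ///
    /// // Insert a newline and re-indent each affected line once parsing settles.
    /// buffer.edit([(10..10, "\n")], Some(AutoindentMode::EachLine), cx);
    /// ```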
2327 pub fn edit<I, S, T>(
2328 &mut self,
2329 edits_iter: I,
2330 autoindent_mode: Option<AutoindentMode>,
2331 cx: &mut Context<Self>,
2332 ) -> Option<clock::Lamport>
2333 where
2334 I: IntoIterator<Item = (Range<S>, T)>,
2335 S: ToOffset,
2336 T: Into<Arc<str>>,
2337 {
2338 // Skip invalid edits and coalesce contiguous ones.
2339 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2340
2341 for (range, new_text) in edits_iter {
2342 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2343
2344 if range.start > range.end {
2345 mem::swap(&mut range.start, &mut range.end);
2346 }
2347 let new_text = new_text.into();
2348 if !new_text.is_empty() || !range.is_empty() {
2349 if let Some((prev_range, prev_text)) = edits.last_mut()
2350 && prev_range.end >= range.start
2351 {
2352 prev_range.end = cmp::max(prev_range.end, range.end);
2353 *prev_text = format!("{prev_text}{new_text}").into();
2354 } else {
2355 edits.push((range, new_text));
2356 }
2357 }
2358 }
2359 if edits.is_empty() {
2360 return None;
2361 }
2362
2363 self.start_transaction();
2364 self.pending_autoindent.take();
2365 let autoindent_request = autoindent_mode
2366 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2367
2368 let edit_operation = self
2369 .text
2370 .edit(edits.iter().cloned(), cx.background_executor());
2371 let edit_id = edit_operation.timestamp();
2372
2373 if let Some((before_edit, mode)) = autoindent_request {
2374 let mut delta = 0isize;
2375 let mut previous_setting = None;
2376 let entries: Vec<_> = edits
2377 .into_iter()
2378 .enumerate()
2379 .zip(&edit_operation.as_edit().unwrap().new_text)
2380 .filter(|((_, (range, _)), _)| {
2381 let language = before_edit.language_at(range.start);
2382 let language_id = language.map(|l| l.id());
2383 if let Some((cached_language_id, auto_indent)) = previous_setting
2384 && cached_language_id == language_id
2385 {
2386 auto_indent
2387 } else {
2388 // The auto-indent setting is not present in editorconfigs, hence
2389 // we can avoid passing the file here.
2390 let auto_indent =
2391 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2392 previous_setting = Some((language_id, auto_indent));
2393 auto_indent
2394 }
2395 })
2396 .map(|((ix, (range, _)), new_text)| {
2397 let new_text_length = new_text.len();
2398 let old_start = range.start.to_point(&before_edit);
2399 let new_start = (delta + range.start as isize) as usize;
2400 let range_len = range.end - range.start;
2401 delta += new_text_length as isize - range_len as isize;
2402
2403 // Decide what range of the insertion to auto-indent, and whether
2404 // the first line of the insertion should be considered a newly-inserted line
2405 // or an edit to an existing line.
2406 let mut range_of_insertion_to_indent = 0..new_text_length;
2407 let mut first_line_is_new = true;
2408
2409 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2410 let old_line_end = before_edit.line_len(old_start.row);
2411
2412 if old_start.column > old_line_start {
2413 first_line_is_new = false;
2414 }
2415
2416 if !new_text.contains('\n')
2417 && (old_start.column + (range_len as u32) < old_line_end
2418 || old_line_end == old_line_start)
2419 {
2420 first_line_is_new = false;
2421 }
2422
2423 // When inserting text starting with a newline, avoid auto-indenting the
2424 // previous line.
2425 if new_text.starts_with('\n') {
2426 range_of_insertion_to_indent.start += 1;
2427 first_line_is_new = true;
2428 }
2429
2430 let mut original_indent_column = None;
2431 if let AutoindentMode::Block {
2432 original_indent_columns,
2433 } = &mode
2434 {
2435 original_indent_column = Some(if new_text.starts_with('\n') {
2436 indent_size_for_text(
2437 new_text[range_of_insertion_to_indent.clone()].chars(),
2438 )
2439 .len
2440 } else {
2441 original_indent_columns
2442 .get(ix)
2443 .copied()
2444 .flatten()
2445 .unwrap_or_else(|| {
2446 indent_size_for_text(
2447 new_text[range_of_insertion_to_indent.clone()].chars(),
2448 )
2449 .len
2450 })
2451 });
2452
2453 // Avoid auto-indenting the line after the edit.
2454 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2455 range_of_insertion_to_indent.end -= 1;
2456 }
2457 }
2458
2459 AutoindentRequestEntry {
2460 first_line_is_new,
2461 original_indent_column,
2462 indent_size: before_edit.language_indent_size_at(range.start, cx),
2463 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2464 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2465 }
2466 })
2467 .collect();
2468
2469 if !entries.is_empty() {
2470 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2471 before_edit,
2472 entries,
2473 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2474 ignore_empty_lines: false,
2475 }));
2476 }
2477 }
2478
2479 self.end_transaction(cx);
2480 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2481 Some(edit_id)
2482 }
2483
2484 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2485 self.was_changed();
2486
2487 if self.edits_since::<usize>(old_version).next().is_none() {
2488 return;
2489 }
2490
2491 self.reparse(cx);
2492 cx.emit(BufferEvent::Edited);
2493 if was_dirty != self.is_dirty() {
2494 cx.emit(BufferEvent::DirtyChanged);
2495 }
2496 cx.notify();
2497 }
2498
2499 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2500 where
2501 I: IntoIterator<Item = Range<T>>,
2502 T: ToOffset + Copy,
2503 {
2504 let before_edit = self.snapshot();
2505 let entries = ranges
2506 .into_iter()
2507 .map(|range| AutoindentRequestEntry {
2508 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2509 first_line_is_new: true,
2510 indent_size: before_edit.language_indent_size_at(range.start, cx),
2511 original_indent_column: None,
2512 })
2513 .collect();
2514 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2515 before_edit,
2516 entries,
2517 is_block_mode: false,
2518 ignore_empty_lines: true,
2519 }));
2520 self.request_autoindent(cx);
2521 }
2522
2523    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2524    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2525 pub fn insert_empty_line(
2526 &mut self,
2527 position: impl ToPoint,
2528 space_above: bool,
2529 space_below: bool,
2530 cx: &mut Context<Self>,
2531 ) -> Point {
2532 let mut position = position.to_point(self);
2533
2534 self.start_transaction();
2535
2536 self.edit(
2537 [(position..position, "\n")],
2538 Some(AutoindentMode::EachLine),
2539 cx,
2540 );
2541
2542 if position.column > 0 {
2543 position += Point::new(1, 0);
2544 }
2545
2546 if !self.is_line_blank(position.row) {
2547 self.edit(
2548 [(position..position, "\n")],
2549 Some(AutoindentMode::EachLine),
2550 cx,
2551 );
2552 }
2553
2554 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2555 self.edit(
2556 [(position..position, "\n")],
2557 Some(AutoindentMode::EachLine),
2558 cx,
2559 );
2560 position.row += 1;
2561 }
2562
2563 if space_below
2564 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2565 {
2566 self.edit(
2567 [(position..position, "\n")],
2568 Some(AutoindentMode::EachLine),
2569 cx,
2570 );
2571 }
2572
2573 self.end_transaction(cx);
2574
2575 position
2576 }
2577
2578 /// Applies the given remote operations to the buffer.
2579 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2580 self.pending_autoindent.take();
2581 let was_dirty = self.is_dirty();
2582 let old_version = self.version.clone();
2583 let mut deferred_ops = Vec::new();
2584 let buffer_ops = ops
2585 .into_iter()
2586 .filter_map(|op| match op {
2587 Operation::Buffer(op) => Some(op),
2588 _ => {
2589 if self.can_apply_op(&op) {
2590 self.apply_op(op, cx);
2591 } else {
2592 deferred_ops.push(op);
2593 }
2594 None
2595 }
2596 })
2597 .collect::<Vec<_>>();
2598 for operation in buffer_ops.iter() {
2599 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2600 }
2601 self.text
2602 .apply_ops(buffer_ops, Some(cx.background_executor()));
2603 self.deferred_ops.insert(deferred_ops);
2604 self.flush_deferred_ops(cx);
2605 self.did_edit(&old_version, was_dirty, cx);
2606 // Notify independently of whether the buffer was edited as the operations could include a
2607 // selection update.
2608 cx.notify();
2609 }
2610
2611 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2612 let mut deferred_ops = Vec::new();
2613 for op in self.deferred_ops.drain().iter().cloned() {
2614 if self.can_apply_op(&op) {
2615 self.apply_op(op, cx);
2616 } else {
2617 deferred_ops.push(op);
2618 }
2619 }
2620 self.deferred_ops.insert(deferred_ops);
2621 }
2622
2623 pub fn has_deferred_ops(&self) -> bool {
2624 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2625 }
2626
2627 fn can_apply_op(&self, operation: &Operation) -> bool {
2628 match operation {
2629 Operation::Buffer(_) => {
2630 unreachable!("buffer operations should never be applied at this layer")
2631 }
2632 Operation::UpdateDiagnostics {
2633 diagnostics: diagnostic_set,
2634 ..
2635 } => diagnostic_set.iter().all(|diagnostic| {
2636 self.text.can_resolve(&diagnostic.range.start)
2637 && self.text.can_resolve(&diagnostic.range.end)
2638 }),
2639 Operation::UpdateSelections { selections, .. } => selections
2640 .iter()
2641 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2642 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2643 }
2644 }
2645
2646 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2647 match operation {
2648 Operation::Buffer(_) => {
2649 unreachable!("buffer operations should never be applied at this layer")
2650 }
2651 Operation::UpdateDiagnostics {
2652 server_id,
2653 diagnostics: diagnostic_set,
2654 lamport_timestamp,
2655 } => {
2656 let snapshot = self.snapshot();
2657 self.apply_diagnostic_update(
2658 server_id,
2659 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2660 lamport_timestamp,
2661 cx,
2662 );
2663 }
2664 Operation::UpdateSelections {
2665 selections,
2666 lamport_timestamp,
2667 line_mode,
2668 cursor_shape,
2669 } => {
2670 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2671 && set.lamport_timestamp > lamport_timestamp
2672 {
2673 return;
2674 }
2675
2676 self.remote_selections.insert(
2677 lamport_timestamp.replica_id,
2678 SelectionSet {
2679 selections,
2680 lamport_timestamp,
2681 line_mode,
2682 cursor_shape,
2683 },
2684 );
2685 self.text.lamport_clock.observe(lamport_timestamp);
2686 self.non_text_state_update_count += 1;
2687 }
2688 Operation::UpdateCompletionTriggers {
2689 triggers,
2690 lamport_timestamp,
2691 server_id,
2692 } => {
2693 if triggers.is_empty() {
2694 self.completion_triggers_per_language_server
2695 .remove(&server_id);
2696 self.completion_triggers = self
2697 .completion_triggers_per_language_server
2698 .values()
2699 .flat_map(|triggers| triggers.iter().cloned())
2700 .collect();
2701 } else {
2702 self.completion_triggers_per_language_server
2703 .insert(server_id, triggers.iter().cloned().collect());
2704 self.completion_triggers.extend(triggers);
2705 }
2706 self.text.lamport_clock.observe(lamport_timestamp);
2707 }
2708 Operation::UpdateLineEnding {
2709 line_ending,
2710 lamport_timestamp,
2711 } => {
2712 self.text.set_line_ending(line_ending);
2713 self.text.lamport_clock.observe(lamport_timestamp);
2714 }
2715 }
2716 }
2717
2718 fn apply_diagnostic_update(
2719 &mut self,
2720 server_id: LanguageServerId,
2721 diagnostics: DiagnosticSet,
2722 lamport_timestamp: clock::Lamport,
2723 cx: &mut Context<Self>,
2724 ) {
2725 if lamport_timestamp > self.diagnostics_timestamp {
2726 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2727 if diagnostics.is_empty() {
2728 if let Ok(ix) = ix {
2729 self.diagnostics.remove(ix);
2730 }
2731 } else {
2732 match ix {
2733 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2734 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2735 };
2736 }
2737 self.diagnostics_timestamp = lamport_timestamp;
2738 self.non_text_state_update_count += 1;
2739 self.text.lamport_clock.observe(lamport_timestamp);
2740 cx.notify();
2741 cx.emit(BufferEvent::DiagnosticsUpdated);
2742 }
2743 }
2744
2745 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2746 self.was_changed();
2747 cx.emit(BufferEvent::Operation {
2748 operation,
2749 is_local,
2750 });
2751 }
2752
2753 /// Removes the selections for a given peer.
2754 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2755 self.remote_selections.remove(&replica_id);
2756 cx.notify();
2757 }
2758
2759 /// Undoes the most recent transaction.
2760 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2761 let was_dirty = self.is_dirty();
2762 let old_version = self.version.clone();
2763
2764 if let Some((transaction_id, operation)) = self.text.undo() {
2765 self.send_operation(Operation::Buffer(operation), true, cx);
2766 self.did_edit(&old_version, was_dirty, cx);
2767 Some(transaction_id)
2768 } else {
2769 None
2770 }
2771 }
2772
2773 /// Manually undoes a specific transaction in the buffer's undo history.
2774 pub fn undo_transaction(
2775 &mut self,
2776 transaction_id: TransactionId,
2777 cx: &mut Context<Self>,
2778 ) -> bool {
2779 let was_dirty = self.is_dirty();
2780 let old_version = self.version.clone();
2781 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2782 self.send_operation(Operation::Buffer(operation), true, cx);
2783 self.did_edit(&old_version, was_dirty, cx);
2784 true
2785 } else {
2786 false
2787 }
2788 }
2789
2790 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2791 pub fn undo_to_transaction(
2792 &mut self,
2793 transaction_id: TransactionId,
2794 cx: &mut Context<Self>,
2795 ) -> bool {
2796 let was_dirty = self.is_dirty();
2797 let old_version = self.version.clone();
2798
2799 let operations = self.text.undo_to_transaction(transaction_id);
2800 let undone = !operations.is_empty();
2801 for operation in operations {
2802 self.send_operation(Operation::Buffer(operation), true, cx);
2803 }
2804 if undone {
2805 self.did_edit(&old_version, was_dirty, cx)
2806 }
2807 undone
2808 }
2809
2810 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2811 let was_dirty = self.is_dirty();
2812 let operation = self.text.undo_operations(counts);
2813 let old_version = self.version.clone();
2814 self.send_operation(Operation::Buffer(operation), true, cx);
2815 self.did_edit(&old_version, was_dirty, cx);
2816 }
2817
2818    /// Redoes the most recently undone transaction.
2819 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2820 let was_dirty = self.is_dirty();
2821 let old_version = self.version.clone();
2822
2823 if let Some((transaction_id, operation)) = self.text.redo() {
2824 self.send_operation(Operation::Buffer(operation), true, cx);
2825 self.did_edit(&old_version, was_dirty, cx);
2826 Some(transaction_id)
2827 } else {
2828 None
2829 }
2830 }
2831
2832    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2833 pub fn redo_to_transaction(
2834 &mut self,
2835 transaction_id: TransactionId,
2836 cx: &mut Context<Self>,
2837 ) -> bool {
2838 let was_dirty = self.is_dirty();
2839 let old_version = self.version.clone();
2840
2841 let operations = self.text.redo_to_transaction(transaction_id);
2842 let redone = !operations.is_empty();
2843 for operation in operations {
2844 self.send_operation(Operation::Buffer(operation), true, cx);
2845 }
2846 if redone {
2847 self.did_edit(&old_version, was_dirty, cx)
2848 }
2849 redone
2850 }
2851
2852 /// Override current completion triggers with the user-provided completion triggers.
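    ///
    /// # Example
    ///
    /// A sketch of registering the trigger characters reported by a language server;
    /// `server_id` and the trigger set are illustrative.
    ///
    /// ```ignore
    /// use std::collections::BTreeSet;
    ///
    /// let triggers = BTreeSet::from([".".to_string(), "::".to_string()]);
    /// buffer.set_completion_triggers(server_id, triggers, cx);
    /// assert!(buffer.completion_triggers().contains("."));
    /// ```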
2853 pub fn set_completion_triggers(
2854 &mut self,
2855 server_id: LanguageServerId,
2856 triggers: BTreeSet<String>,
2857 cx: &mut Context<Self>,
2858 ) {
2859 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2860 if triggers.is_empty() {
2861 self.completion_triggers_per_language_server
2862 .remove(&server_id);
2863 self.completion_triggers = self
2864 .completion_triggers_per_language_server
2865 .values()
2866 .flat_map(|triggers| triggers.iter().cloned())
2867 .collect();
2868 } else {
2869 self.completion_triggers_per_language_server
2870 .insert(server_id, triggers.clone());
2871 self.completion_triggers.extend(triggers.iter().cloned());
2872 }
2873 self.send_operation(
2874 Operation::UpdateCompletionTriggers {
2875 triggers: triggers.into_iter().collect(),
2876 lamport_timestamp: self.completion_triggers_timestamp,
2877 server_id,
2878 },
2879 true,
2880 cx,
2881 );
2882 cx.notify();
2883 }
2884
2885 /// Returns a list of strings which trigger a completion menu for this language.
2886    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2887 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2888 &self.completion_triggers
2889 }
2890
2891 /// Call this directly after performing edits to prevent the preview tab
2892 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2893 /// to return false until there are additional edits.
2894 pub fn refresh_preview(&mut self) {
2895 self.preview_version = self.version.clone();
2896 }
2897
2898 /// Whether we should preserve the preview status of a tab containing this buffer.
2899 pub fn preserve_preview(&self) -> bool {
2900 !self.has_edits_since(&self.preview_version)
2901 }
2902}
2903
2904#[doc(hidden)]
2905#[cfg(any(test, feature = "test-support"))]
2906impl Buffer {
2907 pub fn edit_via_marked_text(
2908 &mut self,
2909 marked_string: &str,
2910 autoindent_mode: Option<AutoindentMode>,
2911 cx: &mut Context<Self>,
2912 ) {
2913 let edits = self.edits_for_marked_text(marked_string);
2914 self.edit(edits, autoindent_mode, cx);
2915 }
2916
2917 pub fn set_group_interval(&mut self, group_interval: Duration) {
2918 self.text.set_group_interval(group_interval);
2919 }
2920
2921 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2922 where
2923 T: rand::Rng,
2924 {
2925 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2926 let mut last_end = None;
2927 for _ in 0..old_range_count {
2928 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2929 break;
2930 }
2931
2932 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2933 let mut range = self.random_byte_range(new_start, rng);
2934 if rng.random_bool(0.2) {
2935 mem::swap(&mut range.start, &mut range.end);
2936 }
2937 last_end = Some(range.end);
2938
2939 let new_text_len = rng.random_range(0..10);
2940 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2941 new_text = new_text.to_uppercase();
2942
2943 edits.push((range, new_text));
2944 }
2945 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2946 self.edit(edits, None, cx);
2947 }
2948
2949 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2950 let was_dirty = self.is_dirty();
2951 let old_version = self.version.clone();
2952
2953 let ops = self.text.randomly_undo_redo(rng);
2954 if !ops.is_empty() {
2955 for op in ops {
2956 self.send_operation(Operation::Buffer(op), true, cx);
2957 self.did_edit(&old_version, was_dirty, cx);
2958 }
2959 }
2960 }
2961}
2962
2963impl EventEmitter<BufferEvent> for Buffer {}
2964
2965impl Deref for Buffer {
2966 type Target = TextBuffer;
2967
2968 fn deref(&self) -> &Self::Target {
2969 &self.text
2970 }
2971}
2972
2973impl BufferSnapshot {
2974 /// Returns [`IndentSize`] for a given line that respects user settings and
2975 /// language preferences.
2976 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2977 indent_size_for_line(self, row)
2978 }
2979
2980 /// Returns [`IndentSize`] for a given position that respects user settings
2981 /// and language preferences.
2982 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2983 let settings = language_settings(
2984 self.language_at(position).map(|l| l.name()),
2985 self.file(),
2986 cx,
2987 );
2988 if settings.hard_tabs {
2989 IndentSize::tab()
2990 } else {
2991 IndentSize::spaces(settings.tab_size.get())
2992 }
2993 }
2994
2995 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2996 /// is passed in as `single_indent_size`.
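    ///
    /// # Example
    ///
    /// A sketch of querying suggestions for rows 1 through 3 using a four-space
    /// indent unit.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```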
2997 pub fn suggested_indents(
2998 &self,
2999 rows: impl Iterator<Item = u32>,
3000 single_indent_size: IndentSize,
3001 ) -> BTreeMap<u32, IndentSize> {
3002 let mut result = BTreeMap::new();
3003
3004 for row_range in contiguous_ranges(rows, 10) {
3005 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3006 Some(suggestions) => suggestions,
3007 _ => break,
3008 };
3009
3010 for (row, suggestion) in row_range.zip(suggestions) {
3011 let indent_size = if let Some(suggestion) = suggestion {
3012 result
3013 .get(&suggestion.basis_row)
3014 .copied()
3015 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3016 .with_delta(suggestion.delta, single_indent_size)
3017 } else {
3018 self.indent_size_for_line(row)
3019 };
3020
3021 result.insert(row, indent_size);
3022 }
3023 }
3024
3025 result
3026 }
3027
3028 fn suggest_autoindents(
3029 &self,
3030 row_range: Range<u32>,
3031 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3032 let config = &self.language.as_ref()?.config;
3033 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3034
3035 #[derive(Debug, Clone)]
3036 struct StartPosition {
3037 start: Point,
3038 suffix: SharedString,
3039 }
3040
3041 // Find the suggested indentation ranges based on the syntax tree.
3042 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3043 let end = Point::new(row_range.end, 0);
3044 let range = (start..end).to_offset(&self.text);
3045 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3046 Some(&grammar.indents_config.as_ref()?.query)
3047 });
3048 let indent_configs = matches
3049 .grammars()
3050 .iter()
3051 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3052 .collect::<Vec<_>>();
3053
3054 let mut indent_ranges = Vec::<Range<Point>>::new();
3055 let mut start_positions = Vec::<StartPosition>::new();
3056 let mut outdent_positions = Vec::<Point>::new();
3057 while let Some(mat) = matches.peek() {
3058 let mut start: Option<Point> = None;
3059 let mut end: Option<Point> = None;
3060
3061 let config = indent_configs[mat.grammar_index];
3062 for capture in mat.captures {
3063 if capture.index == config.indent_capture_ix {
3064 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3065 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3066 } else if Some(capture.index) == config.start_capture_ix {
3067 start = Some(Point::from_ts_point(capture.node.end_position()));
3068 } else if Some(capture.index) == config.end_capture_ix {
3069 end = Some(Point::from_ts_point(capture.node.start_position()));
3070 } else if Some(capture.index) == config.outdent_capture_ix {
3071 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3072 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3073 start_positions.push(StartPosition {
3074 start: Point::from_ts_point(capture.node.start_position()),
3075 suffix: suffix.clone(),
3076 });
3077 }
3078 }
3079
3080 matches.advance();
3081 if let Some((start, end)) = start.zip(end) {
3082 if start.row == end.row {
3083 continue;
3084 }
3085 let range = start..end;
3086 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3087 Err(ix) => indent_ranges.insert(ix, range),
3088 Ok(ix) => {
3089 let prev_range = &mut indent_ranges[ix];
3090 prev_range.end = prev_range.end.max(range.end);
3091 }
3092 }
3093 }
3094 }
3095
3096 let mut error_ranges = Vec::<Range<Point>>::new();
3097 let mut matches = self
3098 .syntax
3099 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3100 while let Some(mat) = matches.peek() {
3101 let node = mat.captures[0].node;
3102 let start = Point::from_ts_point(node.start_position());
3103 let end = Point::from_ts_point(node.end_position());
3104 let range = start..end;
3105 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3106 Ok(ix) | Err(ix) => ix,
3107 };
3108 let mut end_ix = ix;
3109 while let Some(existing_range) = error_ranges.get(end_ix) {
3110 if existing_range.end < end {
3111 end_ix += 1;
3112 } else {
3113 break;
3114 }
3115 }
3116 error_ranges.splice(ix..end_ix, [range]);
3117 matches.advance();
3118 }
3119
3120 outdent_positions.sort();
3121 for outdent_position in outdent_positions {
3122 // find the innermost indent range containing this outdent_position
3123 // set its end to the outdent position
3124 if let Some(range_to_truncate) = indent_ranges
3125 .iter_mut()
3126 .filter(|indent_range| indent_range.contains(&outdent_position))
3127 .next_back()
3128 {
3129 range_to_truncate.end = outdent_position;
3130 }
3131 }
3132
3133 start_positions.sort_by_key(|b| b.start);
3134
3135        // Find the suggested indentation increases and decreases based on regexes.
3136 let mut regex_outdent_map = HashMap::default();
3137 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3138 let mut start_positions_iter = start_positions.iter().peekable();
3139
3140 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3141 self.for_each_line(
3142 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3143 ..Point::new(row_range.end, 0),
3144 |row, line| {
3145 if config
3146 .decrease_indent_pattern
3147 .as_ref()
3148 .is_some_and(|regex| regex.is_match(line))
3149 {
3150 indent_change_rows.push((row, Ordering::Less));
3151 }
3152 if config
3153 .increase_indent_pattern
3154 .as_ref()
3155 .is_some_and(|regex| regex.is_match(line))
3156 {
3157 indent_change_rows.push((row + 1, Ordering::Greater));
3158 }
3159 while let Some(pos) = start_positions_iter.peek() {
3160 if pos.start.row < row {
3161 let pos = start_positions_iter.next().unwrap();
3162 last_seen_suffix
3163 .entry(pos.suffix.to_string())
3164 .or_default()
3165 .push(pos.start);
3166 } else {
3167 break;
3168 }
3169 }
3170 for rule in &config.decrease_indent_patterns {
3171 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3172 let row_start_column = self.indent_size_for_line(row).len;
3173 let basis_row = rule
3174 .valid_after
3175 .iter()
3176 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3177 .flatten()
3178 .filter(|start_point| start_point.column <= row_start_column)
3179 .max_by_key(|start_point| start_point.row);
3180 if let Some(outdent_to_row) = basis_row {
3181 regex_outdent_map.insert(row, outdent_to_row.row);
3182 }
3183 break;
3184 }
3185 }
3186 },
3187 );
3188
3189 let mut indent_changes = indent_change_rows.into_iter().peekable();
3190 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3191 prev_non_blank_row.unwrap_or(0)
3192 } else {
3193 row_range.start.saturating_sub(1)
3194 };
3195
3196 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3197 Some(row_range.map(move |row| {
3198 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3199
3200 let mut indent_from_prev_row = false;
3201 let mut outdent_from_prev_row = false;
3202 let mut outdent_to_row = u32::MAX;
3203 let mut from_regex = false;
3204
3205 while let Some((indent_row, delta)) = indent_changes.peek() {
3206 match indent_row.cmp(&row) {
3207 Ordering::Equal => match delta {
3208 Ordering::Less => {
3209 from_regex = true;
3210 outdent_from_prev_row = true
3211 }
3212 Ordering::Greater => {
3213 indent_from_prev_row = true;
3214 from_regex = true
3215 }
3216 _ => {}
3217 },
3218
3219 Ordering::Greater => break,
3220 Ordering::Less => {}
3221 }
3222
3223 indent_changes.next();
3224 }
3225
3226 for range in &indent_ranges {
3227 if range.start.row >= row {
3228 break;
3229 }
3230 if range.start.row == prev_row && range.end > row_start {
3231 indent_from_prev_row = true;
3232 }
3233 if range.end > prev_row_start && range.end <= row_start {
3234 outdent_to_row = outdent_to_row.min(range.start.row);
3235 }
3236 }
3237
3238 if let Some(basis_row) = regex_outdent_map.get(&row) {
3239 indent_from_prev_row = false;
3240 outdent_to_row = *basis_row;
3241 from_regex = true;
3242 }
3243
3244 let within_error = error_ranges
3245 .iter()
3246 .any(|e| e.start.row < row && e.end > row_start);
3247
3248 let suggestion = if outdent_to_row == prev_row
3249 || (outdent_from_prev_row && indent_from_prev_row)
3250 {
3251 Some(IndentSuggestion {
3252 basis_row: prev_row,
3253 delta: Ordering::Equal,
3254 within_error: within_error && !from_regex,
3255 })
3256 } else if indent_from_prev_row {
3257 Some(IndentSuggestion {
3258 basis_row: prev_row,
3259 delta: Ordering::Greater,
3260 within_error: within_error && !from_regex,
3261 })
3262 } else if outdent_to_row < prev_row {
3263 Some(IndentSuggestion {
3264 basis_row: outdent_to_row,
3265 delta: Ordering::Equal,
3266 within_error: within_error && !from_regex,
3267 })
3268 } else if outdent_from_prev_row {
3269 Some(IndentSuggestion {
3270 basis_row: prev_row,
3271 delta: Ordering::Less,
3272 within_error: within_error && !from_regex,
3273 })
3274 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3275 {
3276 Some(IndentSuggestion {
3277 basis_row: prev_row,
3278 delta: Ordering::Equal,
3279 within_error: within_error && !from_regex,
3280 })
3281 } else {
3282 None
3283 };
3284
3285 prev_row = row;
3286 prev_row_start = row_start;
3287 suggestion
3288 }))
3289 }
3290
3291 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3292 while row > 0 {
3293 row -= 1;
3294 if !self.is_line_blank(row) {
3295 return Some(row);
3296 }
3297 }
3298 None
3299 }
3300
3301 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3302 let captures = self.syntax.captures(range, &self.text, |grammar| {
3303 grammar
3304 .highlights_config
3305 .as_ref()
3306 .map(|config| &config.query)
3307 });
3308 let highlight_maps = captures
3309 .grammars()
3310 .iter()
3311 .map(|grammar| grammar.highlight_map())
3312 .collect();
3313 (captures, highlight_maps)
3314 }
3315
3316 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3317 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3318 /// returned in chunks where each chunk has a single syntax highlighting style and
3319 /// diagnostic status.
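    ///
    /// # Example
    ///
    /// A sketch of walking the highlighted chunks of the first 100 bytes; the chunk
    /// field names are assumptions about the items yielded by [`BufferChunks`].
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     let _style = chunk.syntax_highlight_id; // per-chunk highlight, if any
    ///     print!("{}", chunk.text);
    /// }
    /// ```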
3320 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3321 let range = range.start.to_offset(self)..range.end.to_offset(self);
3322
3323 let mut syntax = None;
3324 if language_aware {
3325 syntax = Some(self.get_highlights(range.clone()));
3326 }
3327 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3328 let diagnostics = language_aware;
3329 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3330 }
3331
3332 pub fn highlighted_text_for_range<T: ToOffset>(
3333 &self,
3334 range: Range<T>,
3335 override_style: Option<HighlightStyle>,
3336 syntax_theme: &SyntaxTheme,
3337 ) -> HighlightedText {
3338 HighlightedText::from_buffer_range(
3339 range,
3340 &self.text,
3341 &self.syntax,
3342 override_style,
3343 syntax_theme,
3344 )
3345 }
3346
3347 /// Invokes the given callback for each line of text in the given range of the buffer.
3348    /// Uses a callback to avoid allocating a string for each line.
3349 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3350 let mut line = String::new();
3351 let mut row = range.start.row;
3352 for chunk in self
3353 .as_rope()
3354 .chunks_in_range(range.to_offset(self))
3355 .chain(["\n"])
3356 {
3357 for (newline_ix, text) in chunk.split('\n').enumerate() {
3358 if newline_ix > 0 {
3359 callback(row, &line);
3360 row += 1;
3361 line.clear();
3362 }
3363 line.push_str(text);
3364 }
3365 }
3366 }
3367
3368 /// Iterates over every [`SyntaxLayer`] in the buffer.
3369 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3370 self.syntax_layers_for_range(0..self.len(), true)
3371 }
3372
3373 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3374 let offset = position.to_offset(self);
3375 self.syntax_layers_for_range(offset..offset, false)
3376 .filter(|l| l.node().end_byte() > offset)
3377 .last()
3378 }
3379
3380 pub fn syntax_layers_for_range<D: ToOffset>(
3381 &self,
3382 range: Range<D>,
3383 include_hidden: bool,
3384 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3385 self.syntax
3386 .layers_for_range(range, &self.text, include_hidden)
3387 }
3388
3389 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3390 &self,
3391 range: Range<D>,
3392 ) -> Option<SyntaxLayer<'_>> {
3393 let range = range.to_offset(self);
3394 self.syntax
3395 .layers_for_range(range, &self.text, false)
3396 .max_by(|a, b| {
3397 if a.depth != b.depth {
3398 a.depth.cmp(&b.depth)
3399 } else if a.offset.0 != b.offset.0 {
3400 a.offset.0.cmp(&b.offset.0)
3401 } else {
3402 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3403 }
3404 })
3405 }
3406
3407 /// Returns the main [`Language`].
3408 pub fn language(&self) -> Option<&Arc<Language>> {
3409 self.language.as_ref()
3410 }
3411
3412 /// Returns the [`Language`] at the given location.
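    ///
    /// # Example
    ///
    /// A sketch of detecting that a position falls inside an injected language layer,
    /// such as a fenced code block in Markdown (`offset` is illustrative).
    ///
    /// ```ignore
    /// let outer = snapshot.language().map(|language| language.name());
    /// let inner = snapshot.language_at(offset).map(|language| language.name());
    /// let is_injected = inner != outer;
    /// ```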
3413 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3414 self.syntax_layer_at(position)
3415 .map(|info| info.language)
3416 .or(self.language.as_ref())
3417 }
3418
3419 /// Returns the settings for the language at the given location.
3420 pub fn settings_at<'a, D: ToOffset>(
3421 &'a self,
3422 position: D,
3423 cx: &'a App,
3424 ) -> Cow<'a, LanguageSettings> {
3425 language_settings(
3426 self.language_at(position).map(|l| l.name()),
3427 self.file.as_ref(),
3428 cx,
3429 )
3430 }
3431
3432 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3433 CharClassifier::new(self.language_scope_at(point))
3434 }
3435
3436 /// Returns the [`LanguageScope`] at the given location.
3437 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3438 let offset = position.to_offset(self);
3439 let mut scope = None;
3440 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3441
3442 // Use the layer that has the smallest node intersecting the given point.
3443 for layer in self
3444 .syntax
3445 .layers_for_range(offset..offset, &self.text, false)
3446 {
3447 let mut cursor = layer.node().walk();
3448
3449 let mut range = None;
3450 loop {
3451 let child_range = cursor.node().byte_range();
3452 if !child_range.contains(&offset) {
3453 break;
3454 }
3455
3456 range = Some(child_range);
3457 if cursor.goto_first_child_for_byte(offset).is_none() {
3458 break;
3459 }
3460 }
3461
3462 if let Some(range) = range
3463 && smallest_range_and_depth.as_ref().is_none_or(
3464 |(smallest_range, smallest_range_depth)| {
3465 if layer.depth > *smallest_range_depth {
3466 true
3467 } else if layer.depth == *smallest_range_depth {
3468 range.len() < smallest_range.len()
3469 } else {
3470 false
3471 }
3472 },
3473 )
3474 {
3475 smallest_range_and_depth = Some((range, layer.depth));
3476 scope = Some(LanguageScope {
3477 language: layer.language.clone(),
3478 override_id: layer.override_id(offset, &self.text),
3479 });
3480 }
3481 }
3482
3483 scope.or_else(|| {
3484 self.language.clone().map(|language| LanguageScope {
3485 language,
3486 override_id: None,
3487 })
3488 })
3489 }
3490
3491 /// Returns a tuple of the range and character kind of the word
3492 /// surrounding the given position.
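    ///
    /// # Example
    ///
    /// A sketch of expanding a cursor offset to the word beneath it; collecting the
    /// range's chunks into a `String` is one way to read the word back out.
    ///
    /// ```ignore
    /// let (range, _kind) = snapshot.surrounding_word(cursor_offset, None);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```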
3493 pub fn surrounding_word<T: ToOffset>(
3494 &self,
3495 start: T,
3496 scope_context: Option<CharScopeContext>,
3497 ) -> (Range<usize>, Option<CharKind>) {
3498 let mut start = start.to_offset(self);
3499 let mut end = start;
3500 let mut next_chars = self.chars_at(start).take(128).peekable();
3501 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3502
3503 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3504 let word_kind = cmp::max(
3505 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3506 next_chars.peek().copied().map(|c| classifier.kind(c)),
3507 );
3508
3509 for ch in prev_chars {
3510 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3511 start -= ch.len_utf8();
3512 } else {
3513 break;
3514 }
3515 }
3516
3517 for ch in next_chars {
3518 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3519 end += ch.len_utf8();
3520 } else {
3521 break;
3522 }
3523 }
3524
3525 (start..end, word_kind)
3526 }
3527
3528 /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
3529 /// range. When `require_larger` is true, the node found must be larger than the query range.
3530 ///
3531 /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
3532 /// be moved to the root of the tree.
3533 fn goto_node_enclosing_range(
3534 cursor: &mut tree_sitter::TreeCursor,
3535 query_range: &Range<usize>,
3536 require_larger: bool,
3537 ) -> bool {
3538 let mut ascending = false;
3539 loop {
3540 let mut range = cursor.node().byte_range();
3541 if query_range.is_empty() {
3542 // When the query range is empty and the current node starts after it, move to the
3543 // previous sibling to find the containing node.
3544 if range.start > query_range.start {
3545 cursor.goto_previous_sibling();
3546 range = cursor.node().byte_range();
3547 }
3548 } else {
3549 // When the query range is non-empty and the current node ends exactly at the start,
3550 // move to the next sibling to find a node that extends beyond the start.
3551 if range.end == query_range.start {
3552 cursor.goto_next_sibling();
3553 range = cursor.node().byte_range();
3554 }
3555 }
3556
3557 let encloses = range.contains_inclusive(query_range)
3558 && (!require_larger || range.len() > query_range.len());
3559 if !encloses {
3560 ascending = true;
3561 if !cursor.goto_parent() {
3562 return false;
3563 }
3564 continue;
3565 } else if ascending {
3566 return true;
3567 }
3568
3569 // Descend into the current node.
3570 if cursor
3571 .goto_first_child_for_byte(query_range.start)
3572 .is_none()
3573 {
3574 return true;
3575 }
3576 }
3577 }
3578
3579 pub fn syntax_ancestor<'a, T: ToOffset>(
3580 &'a self,
3581 range: Range<T>,
3582 ) -> Option<tree_sitter::Node<'a>> {
3583 let range = range.start.to_offset(self)..range.end.to_offset(self);
3584 let mut result: Option<tree_sitter::Node<'a>> = None;
3585 for layer in self
3586 .syntax
3587 .layers_for_range(range.clone(), &self.text, true)
3588 {
3589 let mut cursor = layer.node().walk();
3590
3591 // Find the node that both contains the range and is larger than it.
3592 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3593 continue;
3594 }
3595
3596 let left_node = cursor.node();
3597 let mut layer_result = left_node;
3598
3599 // For an empty range, try to find another node immediately to the right of the range.
3600 if left_node.end_byte() == range.start {
3601 let mut right_node = None;
3602 while !cursor.goto_next_sibling() {
3603 if !cursor.goto_parent() {
3604 break;
3605 }
3606 }
3607
3608 while cursor.node().start_byte() == range.start {
3609 right_node = Some(cursor.node());
3610 if !cursor.goto_first_child() {
3611 break;
3612 }
3613 }
3614
3615 // If there is a candidate node on both sides of the (empty) range, then
3616 // decide between the two by favoring a named node over an anonymous token.
3617 // If both nodes are the same in that regard, favor the right one.
3618 if let Some(right_node) = right_node
3619 && (right_node.is_named() || !left_node.is_named())
3620 {
3621 layer_result = right_node;
3622 }
3623 }
3624
3625 if let Some(previous_result) = &result
3626 && previous_result.byte_range().len() < layer_result.byte_range().len()
3627 {
3628 continue;
3629 }
3630 result = Some(layer_result);
3631 }
3632
3633 result
3634 }
3635
3636 /// Find the previous sibling syntax node at the given range.
3637 ///
3638 /// This function locates the syntax node that precedes the node containing
3639 /// the given range. It searches hierarchically by:
3640 /// 1. Finding the node that contains the given range
3641 /// 2. Looking for the previous sibling at the same tree level
3642 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3643 ///
3644 /// Returns `None` if there is no previous sibling at any ancestor level.
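///
/// A minimal sketch (not a doctest; `snapshot` is an assumed `BufferSnapshot` whose buffer
/// contains two sibling items, and `range` lies inside the second of them):
///
/// ```ignore
/// // The previous sibling is the syntax node for the first item.
/// if let Some(node) = snapshot.syntax_prev_sibling(range) {
///     println!("previous sibling kind: {}", node.kind());
/// }
/// ```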
3645 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3646 &'a self,
3647 range: Range<T>,
3648 ) -> Option<tree_sitter::Node<'a>> {
3649 let range = range.start.to_offset(self)..range.end.to_offset(self);
3650 let mut result: Option<tree_sitter::Node<'a>> = None;
3651
3652 for layer in self
3653 .syntax
3654 .layers_for_range(range.clone(), &self.text, true)
3655 {
3656 let mut cursor = layer.node().walk();
3657
3658 // Find the node that contains the range
3659 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3660 continue;
3661 }
3662
3663 // Look for the previous sibling, moving up ancestor levels if needed
3664 loop {
3665 if cursor.goto_previous_sibling() {
3666 let layer_result = cursor.node();
3667
3668 if let Some(previous_result) = &result {
3669 if previous_result.byte_range().end < layer_result.byte_range().end {
3670 continue;
3671 }
3672 }
3673 result = Some(layer_result);
3674 break;
3675 }
3676
3677 // No sibling found at this level, try moving up to parent
3678 if !cursor.goto_parent() {
3679 break;
3680 }
3681 }
3682 }
3683
3684 result
3685 }
3686
3687 /// Find the next sibling syntax node at the given range.
3688 ///
3689 /// This function locates the syntax node that follows the node containing
3690 /// the given range. It searches hierarchically by:
3691 /// 1. Finding the node that contains the given range
3692 /// 2. Looking for the next sibling at the same tree level
3693 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3694 ///
3695 /// Returns `None` if there is no next sibling at any ancestor level.
3696 pub fn syntax_next_sibling<'a, T: ToOffset>(
3697 &'a self,
3698 range: Range<T>,
3699 ) -> Option<tree_sitter::Node<'a>> {
3700 let range = range.start.to_offset(self)..range.end.to_offset(self);
3701 let mut result: Option<tree_sitter::Node<'a>> = None;
3702
3703 for layer in self
3704 .syntax
3705 .layers_for_range(range.clone(), &self.text, true)
3706 {
3707 let mut cursor = layer.node().walk();
3708
3709 // Find the node that contains the range
3710 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3711 continue;
3712 }
3713
3714 // Look for the next sibling, moving up ancestor levels if needed
3715 loop {
3716 if cursor.goto_next_sibling() {
3717 let layer_result = cursor.node();
3718
3719 if let Some(previous_result) = &result {
3720 if previous_result.byte_range().start > layer_result.byte_range().start {
3721 continue;
3722 }
3723 }
3724 result = Some(layer_result);
3725 break;
3726 }
3727
3728 // No sibling found at this level, try moving up to parent
3729 if !cursor.goto_parent() {
3730 break;
3731 }
3732 }
3733 }
3734
3735 result
3736 }
3737
3738 /// Returns the root syntax node within the given row.
3739 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3740 let start_offset = position.to_offset(self);
3741
3742 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3743
3744 let layer = self
3745 .syntax
3746 .layers_for_range(start_offset..start_offset, &self.text, true)
3747 .next()?;
3748
3749 let mut cursor = layer.node().walk();
3750
3751 // Descend to the first leaf that touches the start of the range.
3752 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3753 if cursor.node().end_byte() == start_offset {
3754 cursor.goto_next_sibling();
3755 }
3756 }
3757
3758 // Ascend to the root node within the same row.
3759 while cursor.goto_parent() {
3760 if cursor.node().start_position().row != row {
3761 break;
3762 }
3763 }
3764
3765 Some(cursor.node())
3766 }
3767
3768 /// Returns the outline for the buffer.
3769 ///
3770 /// This method allows passing an optional [`SyntaxTheme`] to
3771 /// syntax-highlight the returned symbols.
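///
/// To inspect the entries directly, the item list returned by
/// [`Self::outline_items_containing`] can be iterated. A usage sketch (not a doctest;
/// `snapshot` is an assumed `BufferSnapshot` whose language has an outline query):
///
/// ```ignore
/// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```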
3772 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3773 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3774 }
3775
3776 /// Returns all the symbols that contain the given position.
3777 ///
3778 /// This method allows passing an optional [`SyntaxTheme`] to
3779 /// syntax-highlight the returned symbols.
3780 pub fn symbols_containing<T: ToOffset>(
3781 &self,
3782 position: T,
3783 theme: Option<&SyntaxTheme>,
3784 ) -> Vec<OutlineItem<Anchor>> {
3785 let position = position.to_offset(self);
3786 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3787 let end = self.clip_offset(position + 1, Bias::Right);
3788 let mut items = self.outline_items_containing(start..end, false, theme);
3789 let mut prev_depth = None;
3790 items.retain(|item| {
3791 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3792 prev_depth = Some(item.depth);
3793 result
3794 });
3795 items
3796 }
3797
3798 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3799 let range = range.to_offset(self);
3800 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3801 grammar.outline_config.as_ref().map(|c| &c.query)
3802 });
3803 let configs = matches
3804 .grammars()
3805 .iter()
3806 .map(|g| g.outline_config.as_ref().unwrap())
3807 .collect::<Vec<_>>();
3808
3809 while let Some(mat) = matches.peek() {
3810 let config = &configs[mat.grammar_index];
3811 let containing_item_node = maybe!({
3812 let item_node = mat.captures.iter().find_map(|cap| {
3813 if cap.index == config.item_capture_ix {
3814 Some(cap.node)
3815 } else {
3816 None
3817 }
3818 })?;
3819
3820 let item_byte_range = item_node.byte_range();
3821 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3822 None
3823 } else {
3824 Some(item_node)
3825 }
3826 });
3827
3828 if let Some(item_node) = containing_item_node {
3829 return Some(
3830 Point::from_ts_point(item_node.start_position())
3831 ..Point::from_ts_point(item_node.end_position()),
3832 );
3833 }
3834
3835 matches.advance();
3836 }
3837 None
3838 }
3839
3840 pub fn outline_items_containing<T: ToOffset>(
3841 &self,
3842 range: Range<T>,
3843 include_extra_context: bool,
3844 theme: Option<&SyntaxTheme>,
3845 ) -> Vec<OutlineItem<Anchor>> {
3846 self.outline_items_containing_internal(
3847 range,
3848 include_extra_context,
3849 theme,
3850 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3851 )
3852 }
3853
3854 pub fn outline_items_as_points_containing<T: ToOffset>(
3855 &self,
3856 range: Range<T>,
3857 include_extra_context: bool,
3858 theme: Option<&SyntaxTheme>,
3859 ) -> Vec<OutlineItem<Point>> {
3860 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3861 range
3862 })
3863 }
3864
3865 fn outline_items_containing_internal<T: ToOffset, U>(
3866 &self,
3867 range: Range<T>,
3868 include_extra_context: bool,
3869 theme: Option<&SyntaxTheme>,
3870 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3871 ) -> Vec<OutlineItem<U>> {
3872 let range = range.to_offset(self);
3873 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3874 grammar.outline_config.as_ref().map(|c| &c.query)
3875 });
3876
3877 let mut items = Vec::new();
3878 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3879 while let Some(mat) = matches.peek() {
3880 let config = matches.grammars()[mat.grammar_index]
3881 .outline_config
3882 .as_ref()
3883 .unwrap();
3884 if let Some(item) =
3885 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3886 {
3887 items.push(item);
3888 } else if let Some(capture) = mat
3889 .captures
3890 .iter()
3891 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3892 {
3893 let capture_range = capture.node.start_position()..capture.node.end_position();
3894 let mut capture_row_range =
3895 capture_range.start.row as u32..capture_range.end.row as u32;
3896 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3897 {
3898 capture_row_range.end -= 1;
3899 }
3900 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3901 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3902 last_row_range.end = capture_row_range.end;
3903 } else {
3904 annotation_row_ranges.push(capture_row_range);
3905 }
3906 } else {
3907 annotation_row_ranges.push(capture_row_range);
3908 }
3909 }
3910 matches.advance();
3911 }
3912
3913 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3914
3915 // Assign depths based on containment relationships and convert ranges via the provided callback.
3916 let mut item_ends_stack = Vec::<Point>::new();
3917 let mut anchor_items = Vec::new();
3918 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3919 for item in items {
3920 while let Some(last_end) = item_ends_stack.last().copied() {
3921 if last_end < item.range.end {
3922 item_ends_stack.pop();
3923 } else {
3924 break;
3925 }
3926 }
3927
3928 let mut annotation_row_range = None;
3929 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3930 let row_preceding_item = item.range.start.row.saturating_sub(1);
3931 if next_annotation_row_range.end < row_preceding_item {
3932 annotation_row_ranges.next();
3933 } else {
3934 if next_annotation_row_range.end == row_preceding_item {
3935 annotation_row_range = Some(next_annotation_row_range.clone());
3936 annotation_row_ranges.next();
3937 }
3938 break;
3939 }
3940 }
3941
3942 anchor_items.push(OutlineItem {
3943 depth: item_ends_stack.len(),
3944 range: range_callback(self, item.range.clone()),
3945 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3946 text: item.text,
3947 highlight_ranges: item.highlight_ranges,
3948 name_ranges: item.name_ranges,
3949 body_range: item.body_range.map(|r| range_callback(self, r)),
3950 annotation_range: annotation_row_range.map(|annotation_range| {
3951 let point_range = Point::new(annotation_range.start, 0)
3952 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3953 range_callback(self, point_range)
3954 }),
3955 });
3956 item_ends_stack.push(item.range.end);
3957 }
3958
3959 anchor_items
3960 }
3961
3962 fn next_outline_item(
3963 &self,
3964 config: &OutlineConfig,
3965 mat: &SyntaxMapMatch,
3966 range: &Range<usize>,
3967 include_extra_context: bool,
3968 theme: Option<&SyntaxTheme>,
3969 ) -> Option<OutlineItem<Point>> {
3970 let item_node = mat.captures.iter().find_map(|cap| {
3971 if cap.index == config.item_capture_ix {
3972 Some(cap.node)
3973 } else {
3974 None
3975 }
3976 })?;
3977
3978 let item_byte_range = item_node.byte_range();
3979 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3980 return None;
3981 }
3982 let item_point_range = Point::from_ts_point(item_node.start_position())
3983 ..Point::from_ts_point(item_node.end_position());
3984
3985 let mut open_point = None;
3986 let mut close_point = None;
3987
3988 let mut buffer_ranges = Vec::new();
3989 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
3990 let mut range = node.start_byte()..node.end_byte();
3991 let start = node.start_position();
3992 if node.end_position().row > start.row {
3993 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3994 }
3995
3996 if !range.is_empty() {
3997 buffer_ranges.push((range, node_is_name));
3998 }
3999 };
4000
4001 for capture in mat.captures {
4002 if capture.index == config.name_capture_ix {
4003 add_to_buffer_ranges(capture.node, true);
4004 } else if Some(capture.index) == config.context_capture_ix
4005 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4006 {
4007 add_to_buffer_ranges(capture.node, false);
4008 } else {
4009 if Some(capture.index) == config.open_capture_ix {
4010 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4011 } else if Some(capture.index) == config.close_capture_ix {
4012 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4013 }
4014 }
4015 }
4016
4017 if buffer_ranges.is_empty() {
4018 return None;
4019 }
4020 let source_range_for_text =
4021 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4022
4023 let mut text = String::new();
4024 let mut highlight_ranges = Vec::new();
4025 let mut name_ranges = Vec::new();
4026 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4027 let mut last_buffer_range_end = 0;
4028 for (buffer_range, is_name) in buffer_ranges {
4029 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4030 if space_added {
4031 text.push(' ');
4032 }
4033 let before_append_len = text.len();
4034 let mut offset = buffer_range.start;
4035 chunks.seek(buffer_range.clone());
4036 for mut chunk in chunks.by_ref() {
4037 if chunk.text.len() > buffer_range.end - offset {
4038 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4039 offset = buffer_range.end;
4040 } else {
4041 offset += chunk.text.len();
4042 }
4043 let style = chunk
4044 .syntax_highlight_id
4045 .zip(theme)
4046 .and_then(|(highlight, theme)| highlight.style(theme));
4047 if let Some(style) = style {
4048 let start = text.len();
4049 let end = start + chunk.text.len();
4050 highlight_ranges.push((start..end, style));
4051 }
4052 text.push_str(chunk.text);
4053 if offset >= buffer_range.end {
4054 break;
4055 }
4056 }
4057 if is_name {
4058 let after_append_len = text.len();
4059 let start = if space_added && !name_ranges.is_empty() {
4060 before_append_len - 1
4061 } else {
4062 before_append_len
4063 };
4064 name_ranges.push(start..after_append_len);
4065 }
4066 last_buffer_range_end = buffer_range.end;
4067 }
4068
4069 Some(OutlineItem {
4070 depth: 0, // We'll calculate the depth later
4071 range: item_point_range,
4072 source_range_for_text: source_range_for_text.to_point(self),
4073 text,
4074 highlight_ranges,
4075 name_ranges,
4076 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4077 annotation_range: None,
4078 })
4079 }
4080
4081 pub fn function_body_fold_ranges<T: ToOffset>(
4082 &self,
4083 within: Range<T>,
4084 ) -> impl Iterator<Item = Range<usize>> + '_ {
4085 self.text_object_ranges(within, TreeSitterOptions::default())
4086 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4087 }
4088
4089 /// For each grammar in the language, runs the provided
4090 /// [`tree_sitter::Query`] against the given range.
4091 pub fn matches(
4092 &self,
4093 range: Range<usize>,
4094 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4095 ) -> SyntaxMapMatches<'_> {
4096 self.syntax.matches(range, self, query)
4097 }
4098
4099 pub fn all_bracket_ranges(
4100 &self,
4101 range: Range<usize>,
4102 ) -> impl Iterator<Item = BracketMatch> + '_ {
4103 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4104 grammar.brackets_config.as_ref().map(|c| &c.query)
4105 });
4106 let configs = matches
4107 .grammars()
4108 .iter()
4109 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4110 .collect::<Vec<_>>();
4111
4112 iter::from_fn(move || {
4113 while let Some(mat) = matches.peek() {
4114 let mut open = None;
4115 let mut close = None;
4116 let config = &configs[mat.grammar_index];
4117 let pattern = &config.patterns[mat.pattern_index];
4118 for capture in mat.captures {
4119 if capture.index == config.open_capture_ix {
4120 open = Some(capture.node.byte_range());
4121 } else if capture.index == config.close_capture_ix {
4122 close = Some(capture.node.byte_range());
4123 }
4124 }
4125
4126 matches.advance();
4127
4128 let Some((open_range, close_range)) = open.zip(close) else {
4129 continue;
4130 };
4131
4132 let bracket_range = open_range.start..=close_range.end;
4133 if !bracket_range.overlaps(&range) {
4134 continue;
4135 }
4136
4137 return Some(BracketMatch {
4138 open_range,
4139 close_range,
4140 newline_only: pattern.newline_only,
4141 });
4142 }
4143 None
4144 })
4145 }
4146
4147 /// Returns bracket range pairs overlapping or adjacent to `range`.
4148 pub fn bracket_ranges<T: ToOffset>(
4149 &self,
4150 range: Range<T>,
4151 ) -> impl Iterator<Item = BracketMatch> + '_ {
4152 // Find bracket pairs that *inclusively* contain the given range.
4153 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4154 self.all_bracket_ranges(range)
4155 .filter(|pair| !pair.newline_only)
4156 }
4157
4158 pub fn debug_variables_query<T: ToOffset>(
4159 &self,
4160 range: Range<T>,
4161 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4162 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4163
4164 let mut matches = self.syntax.matches_with_options(
4165 range.clone(),
4166 &self.text,
4167 TreeSitterOptions::default(),
4168 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4169 );
4170
4171 let configs = matches
4172 .grammars()
4173 .iter()
4174 .map(|grammar| grammar.debug_variables_config.as_ref())
4175 .collect::<Vec<_>>();
4176
4177 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4178
4179 iter::from_fn(move || {
4180 loop {
4181 while let Some(capture) = captures.pop() {
4182 if capture.0.overlaps(&range) {
4183 return Some(capture);
4184 }
4185 }
4186
4187 let mat = matches.peek()?;
4188
4189 let Some(config) = configs[mat.grammar_index].as_ref() else {
4190 matches.advance();
4191 continue;
4192 };
4193
4194 for capture in mat.captures {
4195 let Some(ix) = config
4196 .objects_by_capture_ix
4197 .binary_search_by_key(&capture.index, |e| e.0)
4198 .ok()
4199 else {
4200 continue;
4201 };
4202 let text_object = config.objects_by_capture_ix[ix].1;
4203 let byte_range = capture.node.byte_range();
4204
4205 let mut found = false;
4206 for (range, existing) in captures.iter_mut() {
4207 if existing == &text_object {
4208 range.start = range.start.min(byte_range.start);
4209 range.end = range.end.max(byte_range.end);
4210 found = true;
4211 break;
4212 }
4213 }
4214
4215 if !found {
4216 captures.push((byte_range, text_object));
4217 }
4218 }
4219
4220 matches.advance();
4221 }
4222 })
4223 }
4224
4225 pub fn text_object_ranges<T: ToOffset>(
4226 &self,
4227 range: Range<T>,
4228 options: TreeSitterOptions,
4229 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4230 let range =
4231 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4232
4233 let mut matches =
4234 self.syntax
4235 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4236 grammar.text_object_config.as_ref().map(|c| &c.query)
4237 });
4238
4239 let configs = matches
4240 .grammars()
4241 .iter()
4242 .map(|grammar| grammar.text_object_config.as_ref())
4243 .collect::<Vec<_>>();
4244
4245 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4246
4247 iter::from_fn(move || {
4248 loop {
4249 while let Some(capture) = captures.pop() {
4250 if capture.0.overlaps(&range) {
4251 return Some(capture);
4252 }
4253 }
4254
4255 let mat = matches.peek()?;
4256
4257 let Some(config) = configs[mat.grammar_index].as_ref() else {
4258 matches.advance();
4259 continue;
4260 };
4261
4262 for capture in mat.captures {
4263 let Some(ix) = config
4264 .text_objects_by_capture_ix
4265 .binary_search_by_key(&capture.index, |e| e.0)
4266 .ok()
4267 else {
4268 continue;
4269 };
4270 let text_object = config.text_objects_by_capture_ix[ix].1;
4271 let byte_range = capture.node.byte_range();
4272
4273 let mut found = false;
4274 for (range, existing) in captures.iter_mut() {
4275 if existing == &text_object {
4276 range.start = range.start.min(byte_range.start);
4277 range.end = range.end.max(byte_range.end);
4278 found = true;
4279 break;
4280 }
4281 }
4282
4283 if !found {
4284 captures.push((byte_range, text_object));
4285 }
4286 }
4287
4288 matches.advance();
4289 }
4290 })
4291 }
4292
4293 /// Returns enclosing bracket ranges containing the given range.
4294 pub fn enclosing_bracket_ranges<T: ToOffset>(
4295 &self,
4296 range: Range<T>,
4297 ) -> impl Iterator<Item = BracketMatch> + '_ {
4298 let range = range.start.to_offset(self)..range.end.to_offset(self);
4299
4300 self.bracket_ranges(range.clone()).filter(move |pair| {
4301 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4302 })
4303 }
4304
4305 /// Returns the smallest enclosing pair of bracket ranges containing the given range, or `None` if no brackets contain the range.
4306 ///
4307 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
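///
/// A minimal sketch (not a doctest; `snapshot` and `cursor` are an assumed `BufferSnapshot`
/// and byte offset, for a language with a bracket query configured):
///
/// ```ignore
/// // For text like `foo(bar)` with the cursor inside `bar`, this yields the byte
/// // ranges of the `(` and `)` tokens.
/// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
///     println!("open bracket at {open:?}, close bracket at {close:?}");
/// }
/// ```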
4308 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4309 &self,
4310 range: Range<T>,
4311 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4312 ) -> Option<(Range<usize>, Range<usize>)> {
4313 let range = range.start.to_offset(self)..range.end.to_offset(self);
4314
4315 // Get the ranges of the innermost pair of brackets.
4316 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4317
4318 for pair in self.enclosing_bracket_ranges(range) {
4319 if let Some(range_filter) = range_filter
4320 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4321 {
4322 continue;
4323 }
4324
4325 let len = pair.close_range.end - pair.open_range.start;
4326
4327 if let Some((existing_open, existing_close)) = &result {
4328 let existing_len = existing_close.end - existing_open.start;
4329 if len > existing_len {
4330 continue;
4331 }
4332 }
4333
4334 result = Some((pair.open_range, pair.close_range));
4335 }
4336
4337 result
4338 }
4339
4340 /// Returns offset ranges for any matches of the redaction query.
4341 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4342 /// will be run on the relevant section of the buffer.
4343 pub fn redacted_ranges<T: ToOffset>(
4344 &self,
4345 range: Range<T>,
4346 ) -> impl Iterator<Item = Range<usize>> + '_ {
4347 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4348 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4349 grammar
4350 .redactions_config
4351 .as_ref()
4352 .map(|config| &config.query)
4353 });
4354
4355 let configs = syntax_matches
4356 .grammars()
4357 .iter()
4358 .map(|grammar| grammar.redactions_config.as_ref())
4359 .collect::<Vec<_>>();
4360
4361 iter::from_fn(move || {
4362 let redacted_range = syntax_matches
4363 .peek()
4364 .and_then(|mat| {
4365 configs[mat.grammar_index].and_then(|config| {
4366 mat.captures
4367 .iter()
4368 .find(|capture| capture.index == config.redaction_capture_ix)
4369 })
4370 })
4371 .map(|mat| mat.node.byte_range());
4372 syntax_matches.advance();
4373 redacted_range
4374 })
4375 }
4376
4377 pub fn injections_intersecting_range<T: ToOffset>(
4378 &self,
4379 range: Range<T>,
4380 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4381 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4382
4383 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4384 grammar
4385 .injection_config
4386 .as_ref()
4387 .map(|config| &config.query)
4388 });
4389
4390 let configs = syntax_matches
4391 .grammars()
4392 .iter()
4393 .map(|grammar| grammar.injection_config.as_ref())
4394 .collect::<Vec<_>>();
4395
4396 iter::from_fn(move || {
4397 let ranges = syntax_matches.peek().and_then(|mat| {
4398 let config = &configs[mat.grammar_index]?;
4399 let content_capture_range = mat.captures.iter().find_map(|capture| {
4400 if capture.index == config.content_capture_ix {
4401 Some(capture.node.byte_range())
4402 } else {
4403 None
4404 }
4405 })?;
4406 let language = self.language_at(content_capture_range.start)?;
4407 Some((content_capture_range, language))
4408 });
4409 syntax_matches.advance();
4410 ranges
4411 })
4412 }
4413
4414 pub fn runnable_ranges(
4415 &self,
4416 offset_range: Range<usize>,
4417 ) -> impl Iterator<Item = RunnableRange> + '_ {
4418 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4419 grammar.runnable_config.as_ref().map(|config| &config.query)
4420 });
4421
4422 let test_configs = syntax_matches
4423 .grammars()
4424 .iter()
4425 .map(|grammar| grammar.runnable_config.as_ref())
4426 .collect::<Vec<_>>();
4427
4428 iter::from_fn(move || {
4429 loop {
4430 let mat = syntax_matches.peek()?;
4431
4432 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4433 let mut run_range = None;
4434 let full_range = mat.captures.iter().fold(
4435 Range {
4436 start: usize::MAX,
4437 end: 0,
4438 },
4439 |mut acc, next| {
4440 let byte_range = next.node.byte_range();
4441 if acc.start > byte_range.start {
4442 acc.start = byte_range.start;
4443 }
4444 if acc.end < byte_range.end {
4445 acc.end = byte_range.end;
4446 }
4447 acc
4448 },
4449 );
4450 if full_range.start > full_range.end {
4451 // We did not find a full spanning range of this match.
4452 return None;
4453 }
4454 let extra_captures: SmallVec<[_; 1]> =
4455 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4456 test_configs
4457 .extra_captures
4458 .get(capture.index as usize)
4459 .cloned()
4460 .and_then(|tag_name| match tag_name {
4461 RunnableCapture::Named(name) => {
4462 Some((capture.node.byte_range(), name))
4463 }
4464 RunnableCapture::Run => {
4465 let _ = run_range.insert(capture.node.byte_range());
4466 None
4467 }
4468 })
4469 }));
4470 let run_range = run_range?;
4471 let tags = test_configs
4472 .query
4473 .property_settings(mat.pattern_index)
4474 .iter()
4475 .filter_map(|property| {
4476 if *property.key == *"tag" {
4477 property
4478 .value
4479 .as_ref()
4480 .map(|value| RunnableTag(value.to_string().into()))
4481 } else {
4482 None
4483 }
4484 })
4485 .collect();
4486 let extra_captures = extra_captures
4487 .into_iter()
4488 .map(|(range, name)| {
4489 (
4490 name.to_string(),
4491 self.text_for_range(range).collect::<String>(),
4492 )
4493 })
4494 .collect();
4495 // All tags should have the same range.
4496 Some(RunnableRange {
4497 run_range,
4498 full_range,
4499 runnable: Runnable {
4500 tags,
4501 language: mat.language,
4502 buffer: self.remote_id(),
4503 },
4504 extra_captures,
4505 buffer_id: self.remote_id(),
4506 })
4507 });
4508
4509 syntax_matches.advance();
4510 if test_range.is_some() {
4511 // It's fine to short-circuit when `.peek()?` returns `None`, but we don't want to end this
4512 // iterator just because a match lacked a run marker, so in that case we loop around to the next match.
4513 return test_range;
4514 }
4515 }
4516 })
4517 }
4518
4519 /// Returns selections for remote peers intersecting the given range.
4520 #[allow(clippy::type_complexity)]
4521 pub fn selections_in_range(
4522 &self,
4523 range: Range<Anchor>,
4524 include_local: bool,
4525 ) -> impl Iterator<
4526 Item = (
4527 ReplicaId,
4528 bool,
4529 CursorShape,
4530 impl Iterator<Item = &Selection<Anchor>> + '_,
4531 ),
4532 > + '_ {
4533 self.remote_selections
4534 .iter()
4535 .filter(move |(replica_id, set)| {
4536 (include_local || **replica_id != self.text.replica_id())
4537 && !set.selections.is_empty()
4538 })
4539 .map(move |(replica_id, set)| {
4540 let start_ix = match set.selections.binary_search_by(|probe| {
4541 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4542 }) {
4543 Ok(ix) | Err(ix) => ix,
4544 };
4545 let end_ix = match set.selections.binary_search_by(|probe| {
4546 probe.start.cmp(&range.end, self).then(Ordering::Less)
4547 }) {
4548 Ok(ix) | Err(ix) => ix,
4549 };
4550
4551 (
4552 *replica_id,
4553 set.line_mode,
4554 set.cursor_shape,
4555 set.selections[start_ix..end_ix].iter(),
4556 )
4557 })
4558 }
4559
4560 /// Returns whether the buffer contains any diagnostics.
4561 pub fn has_diagnostics(&self) -> bool {
4562 !self.diagnostics.is_empty()
4563 }
4564
4565 /// Returns all the diagnostics intersecting the given range.
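///
/// A usage sketch (not a doctest; `snapshot` is an assumed `BufferSnapshot` with diagnostics
/// already applied):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?} at {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
/// }
/// ```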
4566 pub fn diagnostics_in_range<'a, T, O>(
4567 &'a self,
4568 search_range: Range<T>,
4569 reversed: bool,
4570 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4571 where
4572 T: 'a + Clone + ToOffset,
4573 O: 'a + FromAnchor,
4574 {
4575 let mut iterators: Vec<_> = self
4576 .diagnostics
4577 .iter()
4578 .map(|(_, collection)| {
4579 collection
4580 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4581 .peekable()
4582 })
4583 .collect();
4584
4585 std::iter::from_fn(move || {
4586 let (next_ix, _) = iterators
4587 .iter_mut()
4588 .enumerate()
4589 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4590 .min_by(|(_, a), (_, b)| {
4591 let cmp = a
4592 .range
4593 .start
4594 .cmp(&b.range.start, self)
4595 // when range is equal, sort by diagnostic severity
4596 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4597 // and stabilize order with group_id
4598 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4599 if reversed { cmp.reverse() } else { cmp }
4600 })?;
4601 iterators[next_ix]
4602 .next()
4603 .map(
4604 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4605 diagnostic,
4606 range: FromAnchor::from_anchor(&range.start, self)
4607 ..FromAnchor::from_anchor(&range.end, self),
4608 },
4609 )
4610 })
4611 }
4612
4613 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4614 /// should be used instead.
4615 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4616 &self.diagnostics
4617 }
4618
4619 /// Returns all the diagnostic groups associated with the given
4620 /// language server ID. If no language server ID is provided,
4621 /// all diagnostic groups are returned.
4622 pub fn diagnostic_groups(
4623 &self,
4624 language_server_id: Option<LanguageServerId>,
4625 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4626 let mut groups = Vec::new();
4627
4628 if let Some(language_server_id) = language_server_id {
4629 if let Ok(ix) = self
4630 .diagnostics
4631 .binary_search_by_key(&language_server_id, |e| e.0)
4632 {
4633 self.diagnostics[ix]
4634 .1
4635 .groups(language_server_id, &mut groups, self);
4636 }
4637 } else {
4638 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4639 diagnostics.groups(*language_server_id, &mut groups, self);
4640 }
4641 }
4642
4643 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4644 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4645 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4646 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4647 });
4648
4649 groups
4650 }
4651
4652 /// Returns an iterator over the diagnostics for the given group.
4653 pub fn diagnostic_group<O>(
4654 &self,
4655 group_id: usize,
4656 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4657 where
4658 O: FromAnchor + 'static,
4659 {
4660 self.diagnostics
4661 .iter()
4662 .flat_map(move |(_, set)| set.group(group_id, self))
4663 }
4664
4665 /// An integer version number that accounts for all updates besides
4666 /// the buffer's text itself (which is versioned via a version vector).
4667 pub fn non_text_state_update_count(&self) -> usize {
4668 self.non_text_state_update_count
4669 }
4670
4671 /// An integer version that changes when the buffer's syntax changes.
4672 pub fn syntax_update_count(&self) -> usize {
4673 self.syntax.update_count()
4674 }
4675
4676 /// Returns a snapshot of the underlying file.
4677 pub fn file(&self) -> Option<&Arc<dyn File>> {
4678 self.file.as_ref()
4679 }
4680
4681 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4682 if let Some(file) = self.file() {
4683 if file.path().file_name().is_none() || include_root {
4684 Some(file.full_path(cx).to_string_lossy().into_owned())
4685 } else {
4686 Some(file.path().display(file.path_style(cx)).to_string())
4687 }
4688 } else {
4689 None
4690 }
4691 }
4692
4693 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4694 let query_str = query.fuzzy_contents;
4695 if query_str.is_some_and(|query| query.is_empty()) {
4696 return BTreeMap::default();
4697 }
4698
4699 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4700 language,
4701 override_id: None,
4702 }));
4703
4704 let mut query_ix = 0;
4705 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4706 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4707
4708 let mut words = BTreeMap::default();
4709 let mut current_word_start_ix = None;
4710 let mut chunk_ix = query.range.start;
4711 for chunk in self.chunks(query.range, false) {
4712 for (i, c) in chunk.text.char_indices() {
4713 let ix = chunk_ix + i;
4714 if classifier.is_word(c) {
4715 if current_word_start_ix.is_none() {
4716 current_word_start_ix = Some(ix);
4717 }
4718
4719 if let Some(query_chars) = &query_chars
4720 && query_ix < query_len
4721 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4722 {
4723 query_ix += 1;
4724 }
4725 continue;
4726 } else if let Some(word_start) = current_word_start_ix.take()
4727 && query_ix == query_len
4728 {
4729 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4730 let mut word_text = self.text_for_range(word_start..ix).peekable();
4731 let first_char = word_text
4732 .peek()
4733 .and_then(|first_chunk| first_chunk.chars().next());
4734 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4735 if !query.skip_digits
4736 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4737 {
4738 words.insert(word_text.collect(), word_range);
4739 }
4740 }
4741 query_ix = 0;
4742 }
4743 chunk_ix += chunk.text.len();
4744 }
4745
4746 words
4747 }
4748}
4749
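/// Parameters for querying the words contained in a range of a buffer.
///
/// A usage sketch (not a doctest; `snapshot` is an assumed `BufferSnapshot`):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// // `words` maps each matching word to its anchor range in the buffer.
/// for (word, range) in &words {
///     println!("{word}: {range:?}");
/// }
/// ```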
4750pub struct WordsQuery<'a> {
4751 /// Only returns words that contain all characters of the fuzzy string, in order (a case-insensitive subsequence match).
4752 pub fuzzy_contents: Option<&'a str>,
4753 /// Skips words that start with a digit.
4754 pub skip_digits: bool,
4755 /// The buffer offset range in which to look for words.
4756 pub range: Range<usize>,
4757}
4758
4759fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4760 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4761}
4762
4763fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4764 let mut result = IndentSize::spaces(0);
4765 for c in text {
4766 let kind = match c {
4767 ' ' => IndentKind::Space,
4768 '\t' => IndentKind::Tab,
4769 _ => break,
4770 };
4771 if result.len == 0 {
4772 result.kind = kind;
4773 }
4774 result.len += 1;
4775 }
4776 result
4777}
4778
4779impl Clone for BufferSnapshot {
4780 fn clone(&self) -> Self {
4781 Self {
4782 text: self.text.clone(),
4783 syntax: self.syntax.clone(),
4784 file: self.file.clone(),
4785 remote_selections: self.remote_selections.clone(),
4786 diagnostics: self.diagnostics.clone(),
4787 language: self.language.clone(),
4788 non_text_state_update_count: self.non_text_state_update_count,
4789 }
4790 }
4791}
4792
4793impl Deref for BufferSnapshot {
4794 type Target = text::BufferSnapshot;
4795
4796 fn deref(&self) -> &Self::Target {
4797 &self.text
4798 }
4799}
4800
4801unsafe impl Send for BufferChunks<'_> {}
4802
4803impl<'a> BufferChunks<'a> {
4804 pub(crate) fn new(
4805 text: &'a Rope,
4806 range: Range<usize>,
4807 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4808 diagnostics: bool,
4809 buffer_snapshot: Option<&'a BufferSnapshot>,
4810 ) -> Self {
4811 let mut highlights = None;
4812 if let Some((captures, highlight_maps)) = syntax {
4813 highlights = Some(BufferChunkHighlights {
4814 captures,
4815 next_capture: None,
4816 stack: Default::default(),
4817 highlight_maps,
4818 })
4819 }
4820
4821 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4822 let chunks = text.chunks_in_range(range.clone());
4823
4824 let mut this = BufferChunks {
4825 range,
4826 buffer_snapshot,
4827 chunks,
4828 diagnostic_endpoints,
4829 error_depth: 0,
4830 warning_depth: 0,
4831 information_depth: 0,
4832 hint_depth: 0,
4833 unnecessary_depth: 0,
4834 underline: true,
4835 highlights,
4836 };
4837 this.initialize_diagnostic_endpoints();
4838 this
4839 }
4840
4841 /// Seeks to the given byte range in the buffer.
4842 pub fn seek(&mut self, range: Range<usize>) {
4843 let old_range = std::mem::replace(&mut self.range, range.clone());
4844 self.chunks.set_range(self.range.clone());
4845 if let Some(highlights) = self.highlights.as_mut() {
4846 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4847 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4848 highlights
4849 .stack
4850 .retain(|(end_offset, _)| *end_offset > range.start);
4851 if let Some(capture) = &highlights.next_capture
4852 && range.start >= capture.node.start_byte()
4853 {
4854 let next_capture_end = capture.node.end_byte();
4855 if range.start < next_capture_end {
4856 highlights.stack.push((
4857 next_capture_end,
4858 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4859 ));
4860 }
4861 highlights.next_capture.take();
4862 }
4863 } else if let Some(snapshot) = self.buffer_snapshot {
4864 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4865 *highlights = BufferChunkHighlights {
4866 captures,
4867 next_capture: None,
4868 stack: Default::default(),
4869 highlight_maps,
4870 };
4871 } else {
4872 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4873 // Seeking such BufferChunks is not supported.
4874 debug_assert!(
4875 false,
4876 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4877 );
4878 }
4879
4880 highlights.captures.set_byte_range(self.range.clone());
4881 self.initialize_diagnostic_endpoints();
4882 }
4883 }
4884
4885 fn initialize_diagnostic_endpoints(&mut self) {
4886 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4887 && let Some(buffer) = self.buffer_snapshot
4888 {
4889 let mut diagnostic_endpoints = Vec::new();
4890 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4891 diagnostic_endpoints.push(DiagnosticEndpoint {
4892 offset: entry.range.start,
4893 is_start: true,
4894 severity: entry.diagnostic.severity,
4895 is_unnecessary: entry.diagnostic.is_unnecessary,
4896 underline: entry.diagnostic.underline,
4897 });
4898 diagnostic_endpoints.push(DiagnosticEndpoint {
4899 offset: entry.range.end,
4900 is_start: false,
4901 severity: entry.diagnostic.severity,
4902 is_unnecessary: entry.diagnostic.is_unnecessary,
4903 underline: entry.diagnostic.underline,
4904 });
4905 }
4906 diagnostic_endpoints
4907 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4908 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4909 self.hint_depth = 0;
4910 self.error_depth = 0;
4911 self.warning_depth = 0;
4912 self.information_depth = 0;
4913 }
4914 }
4915
4916 /// The current byte offset in the buffer.
4917 pub fn offset(&self) -> usize {
4918 self.range.start
4919 }
4920
4921 pub fn range(&self) -> Range<usize> {
4922 self.range.clone()
4923 }
4924
4925 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4926 let depth = match endpoint.severity {
4927 DiagnosticSeverity::ERROR => &mut self.error_depth,
4928 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4929 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4930 DiagnosticSeverity::HINT => &mut self.hint_depth,
4931 _ => return,
4932 };
4933 if endpoint.is_start {
4934 *depth += 1;
4935 } else {
4936 *depth -= 1;
4937 }
4938
4939 if endpoint.is_unnecessary {
4940 if endpoint.is_start {
4941 self.unnecessary_depth += 1;
4942 } else {
4943 self.unnecessary_depth -= 1;
4944 }
4945 }
4946 }
4947
4948 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4949 if self.error_depth > 0 {
4950 Some(DiagnosticSeverity::ERROR)
4951 } else if self.warning_depth > 0 {
4952 Some(DiagnosticSeverity::WARNING)
4953 } else if self.information_depth > 0 {
4954 Some(DiagnosticSeverity::INFORMATION)
4955 } else if self.hint_depth > 0 {
4956 Some(DiagnosticSeverity::HINT)
4957 } else {
4958 None
4959 }
4960 }
4961
4962 fn current_code_is_unnecessary(&self) -> bool {
4963 self.unnecessary_depth > 0
4964 }
4965}
4966
4967impl<'a> Iterator for BufferChunks<'a> {
4968 type Item = Chunk<'a>;
4969
4970 fn next(&mut self) -> Option<Self::Item> {
4971 let mut next_capture_start = usize::MAX;
4972 let mut next_diagnostic_endpoint = usize::MAX;
4973
4974 if let Some(highlights) = self.highlights.as_mut() {
4975 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4976 if *parent_capture_end <= self.range.start {
4977 highlights.stack.pop();
4978 } else {
4979 break;
4980 }
4981 }
4982
4983 if highlights.next_capture.is_none() {
4984 highlights.next_capture = highlights.captures.next();
4985 }
4986
4987 while let Some(capture) = highlights.next_capture.as_ref() {
4988 if self.range.start < capture.node.start_byte() {
4989 next_capture_start = capture.node.start_byte();
4990 break;
4991 } else {
4992 let highlight_id =
4993 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4994 highlights
4995 .stack
4996 .push((capture.node.end_byte(), highlight_id));
4997 highlights.next_capture = highlights.captures.next();
4998 }
4999 }
5000 }
5001
5002 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5003 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5004 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5005 if endpoint.offset <= self.range.start {
5006 self.update_diagnostic_depths(endpoint);
5007 diagnostic_endpoints.next();
5008 self.underline = endpoint.underline;
5009 } else {
5010 next_diagnostic_endpoint = endpoint.offset;
5011 break;
5012 }
5013 }
5014 }
5015 self.diagnostic_endpoints = diagnostic_endpoints;
5016
5017 if let Some(ChunkBitmaps {
5018 text: chunk,
5019 chars: chars_map,
5020 tabs,
5021 }) = self.chunks.peek_with_bitmaps()
5022 {
5023 let chunk_start = self.range.start;
5024 let mut chunk_end = (self.chunks.offset() + chunk.len())
5025 .min(next_capture_start)
5026 .min(next_diagnostic_endpoint);
5027 let mut highlight_id = None;
5028 if let Some(highlights) = self.highlights.as_ref()
5029 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5030 {
5031 chunk_end = chunk_end.min(*parent_capture_end);
5032 highlight_id = Some(*parent_highlight_id);
5033 }
5034 let bit_start = chunk_start - self.chunks.offset();
5035 let bit_end = chunk_end - self.chunks.offset();
5036
5037 let slice = &chunk[bit_start..bit_end];
5038
5039 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5040 let tabs = (tabs >> bit_start) & mask;
5041 let chars = (chars_map >> bit_start) & mask;
5042
5043 self.range.start = chunk_end;
5044 if self.range.start == self.chunks.offset() + chunk.len() {
5045 self.chunks.next().unwrap();
5046 }
5047
5048 Some(Chunk {
5049 text: slice,
5050 syntax_highlight_id: highlight_id,
5051 underline: self.underline,
5052 diagnostic_severity: self.current_diagnostic_severity(),
5053 is_unnecessary: self.current_code_is_unnecessary(),
5054 tabs,
5055 chars,
5056 ..Chunk::default()
5057 })
5058 } else {
5059 None
5060 }
5061 }
5062}
5063
5064impl operation_queue::Operation for Operation {
5065 fn lamport_timestamp(&self) -> clock::Lamport {
5066 match self {
5067 Operation::Buffer(_) => {
5068 unreachable!("buffer operations should never be deferred at this layer")
5069 }
5070 Operation::UpdateDiagnostics {
5071 lamport_timestamp, ..
5072 }
5073 | Operation::UpdateSelections {
5074 lamport_timestamp, ..
5075 }
5076 | Operation::UpdateCompletionTriggers {
5077 lamport_timestamp, ..
5078 }
5079 | Operation::UpdateLineEnding {
5080 lamport_timestamp, ..
5081 } => *lamport_timestamp,
5082 }
5083 }
5084}
5085
5086impl Default for Diagnostic {
5087 fn default() -> Self {
5088 Self {
5089 source: Default::default(),
5090 source_kind: DiagnosticSourceKind::Other,
5091 code: None,
5092 code_description: None,
5093 severity: DiagnosticSeverity::ERROR,
5094 message: Default::default(),
5095 markdown: None,
5096 group_id: 0,
5097 is_primary: false,
5098 is_disk_based: false,
5099 is_unnecessary: false,
5100 underline: true,
5101 data: None,
5102 }
5103 }
5104}
5105
5106impl IndentSize {
5107 /// Returns an [`IndentSize`] representing the given number of spaces.
5108 pub fn spaces(len: u32) -> Self {
5109 Self {
5110 len,
5111 kind: IndentKind::Space,
5112 }
5113 }
5114
5115 /// Returns an [`IndentSize`] representing a tab.
5116 pub fn tab() -> Self {
5117 Self {
5118 len: 1,
5119 kind: IndentKind::Tab,
5120 }
5121 }
5122
5123 /// An iterator over the characters represented by this [`IndentSize`].
5124 pub fn chars(&self) -> impl Iterator<Item = char> {
5125 iter::repeat(self.char()).take(self.len as usize)
5126 }
5127
5128 /// The character representation of this [`IndentSize`].
5129 pub fn char(&self) -> char {
5130 match self.kind {
5131 IndentKind::Space => ' ',
5132 IndentKind::Tab => '\t',
5133 }
5134 }
5135
5136 /// Consumes the current [`IndentSize`] and returns a new one that has
5137 /// been shrunk or enlarged by the given size along the given direction.
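///
/// A couple of illustrative cases matching the rules above (not a doctest):
///
/// ```ignore
/// // Growing a 4-space indent by 4 spaces yields an 8-space indent.
/// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
/// // Shrinking a space-based indent by a tab leaves it unchanged, since the kinds differ.
/// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
/// ```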
5138 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5139 match direction {
5140 Ordering::Less => {
5141 if self.kind == size.kind && self.len >= size.len {
5142 self.len -= size.len;
5143 }
5144 }
5145 Ordering::Equal => {}
5146 Ordering::Greater => {
5147 if self.len == 0 {
5148 self = size;
5149 } else if self.kind == size.kind {
5150 self.len += size.len;
5151 }
5152 }
5153 }
5154 self
5155 }
5156
5157 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5158 match self.kind {
5159 IndentKind::Space => self.len as usize,
5160 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5161 }
5162 }
5163}
5164
5165#[cfg(any(test, feature = "test-support"))]
5166pub struct TestFile {
5167 pub path: Arc<RelPath>,
5168 pub root_name: String,
5169 pub local_root: Option<PathBuf>,
5170}
5171
5172#[cfg(any(test, feature = "test-support"))]
5173impl File for TestFile {
5174 fn path(&self) -> &Arc<RelPath> {
5175 &self.path
5176 }
5177
5178 fn full_path(&self, _: &gpui::App) -> PathBuf {
5179 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5180 }
5181
5182 fn as_local(&self) -> Option<&dyn LocalFile> {
5183 if self.local_root.is_some() {
5184 Some(self)
5185 } else {
5186 None
5187 }
5188 }
5189
5190 fn disk_state(&self) -> DiskState {
5191 unimplemented!()
5192 }
5193
5194 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5195 self.path().file_name().unwrap_or(self.root_name.as_ref())
5196 }
5197
5198 fn worktree_id(&self, _: &App) -> WorktreeId {
5199 WorktreeId::from_usize(0)
5200 }
5201
5202 fn to_proto(&self, _: &App) -> rpc::proto::File {
5203 unimplemented!()
5204 }
5205
5206 fn is_private(&self) -> bool {
5207 false
5208 }
5209
5210 fn path_style(&self, _cx: &App) -> PathStyle {
5211 PathStyle::local()
5212 }
5213}
5214
5215#[cfg(any(test, feature = "test-support"))]
5216impl LocalFile for TestFile {
5217 fn abs_path(&self, _cx: &App) -> PathBuf {
5218 PathBuf::from(self.local_root.as_ref().unwrap())
5219 .join(&self.root_name)
5220 .join(self.path.as_std_path())
5221 }
5222
5223 fn load(&self, _cx: &App) -> Task<Result<String>> {
5224 unimplemented!()
5225 }
5226
5227 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5228 unimplemented!()
5229 }
5230}
5231
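/// Groups an iterator of `u32` values into contiguous ranges, starting a new range whenever a
/// value is not exactly one greater than the previous value or when a range reaches `max_len`.
///
/// A small sketch of the expected behavior (not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```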
5232pub(crate) fn contiguous_ranges(
5233 values: impl Iterator<Item = u32>,
5234 max_len: usize,
5235) -> impl Iterator<Item = Range<u32>> {
5236 let mut values = values;
5237 let mut current_range: Option<Range<u32>> = None;
5238 std::iter::from_fn(move || {
5239 loop {
5240 if let Some(value) = values.next() {
5241 if let Some(range) = &mut current_range
5242 && value == range.end
5243 && range.len() < max_len
5244 {
5245 range.end += 1;
5246 continue;
5247 }
5248
5249 let prev_range = current_range.clone();
5250 current_range = Some(value..(value + 1));
5251 if prev_range.is_some() {
5252 return prev_range;
5253 }
5254 } else {
5255 return current_range.take();
5256 }
5257 }
5258 })
5259}
5260
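/// Classifies characters as word, whitespace, or punctuation characters, optionally taking
/// language-specific word characters from a [`LanguageScope`] into account.
///
/// A minimal sketch with no language scope (not a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```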
5261#[derive(Default, Debug)]
5262pub struct CharClassifier {
5263 scope: Option<LanguageScope>,
5264 scope_context: Option<CharScopeContext>,
5265 ignore_punctuation: bool,
5266}
5267
5268impl CharClassifier {
5269 pub fn new(scope: Option<LanguageScope>) -> Self {
5270 Self {
5271 scope,
5272 scope_context: None,
5273 ignore_punctuation: false,
5274 }
5275 }
5276
5277 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5278 Self {
5279 scope_context,
5280 ..self
5281 }
5282 }
5283
5284 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5285 Self {
5286 ignore_punctuation,
5287 ..self
5288 }
5289 }
5290
5291 pub fn is_whitespace(&self, c: char) -> bool {
5292 self.kind(c) == CharKind::Whitespace
5293 }
5294
5295 pub fn is_word(&self, c: char) -> bool {
5296 self.kind(c) == CharKind::Word
5297 }
5298
5299 pub fn is_punctuation(&self, c: char) -> bool {
5300 self.kind(c) == CharKind::Punctuation
5301 }
5302
5303 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5304 if c.is_alphanumeric() || c == '_' {
5305 return CharKind::Word;
5306 }
5307
5308 if let Some(scope) = &self.scope {
5309 let characters = match self.scope_context {
5310 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5311 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5312 None => scope.word_characters(),
5313 };
5314 if let Some(characters) = characters
5315 && characters.contains(&c)
5316 {
5317 return CharKind::Word;
5318 }
5319 }
5320
5321 if c.is_whitespace() {
5322 return CharKind::Whitespace;
5323 }
5324
5325 if ignore_punctuation {
5326 CharKind::Word
5327 } else {
5328 CharKind::Punctuation
5329 }
5330 }
5331
5332 pub fn kind(&self, c: char) -> CharKind {
5333 self.kind_with(c, self.ignore_punctuation)
5334 }
5335}
5336
5337/// Find all of the ranges of whitespace that occur at the ends of lines
5338/// in the given rope.
5339///
5340/// This could also be done with a regex search, but this implementation
5341/// avoids copying text.
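///
/// A small sketch of the expected output (not a doctest; assumes `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Trailing whitespace on the first two lines: `"  "` and `"\t"`.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```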
5342pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5343 let mut ranges = Vec::new();
5344
5345 let mut offset = 0;
5346 let mut prev_chunk_trailing_whitespace_range = 0..0;
5347 for chunk in rope.chunks() {
5348 let mut prev_line_trailing_whitespace_range = 0..0;
5349 for (i, line) in chunk.split('\n').enumerate() {
5350 let line_end_offset = offset + line.len();
5351 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5352 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5353
5354 if i == 0 && trimmed_line_len == 0 {
5355 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5356 }
5357 if !prev_line_trailing_whitespace_range.is_empty() {
5358 ranges.push(prev_line_trailing_whitespace_range);
5359 }
5360
5361 offset = line_end_offset + 1;
5362 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5363 }
5364
5365 offset -= 1;
5366 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5367 }
5368
5369 if !prev_chunk_trailing_whitespace_range.is_empty() {
5370 ranges.push(prev_chunk_trailing_whitespace_range);
5371 }
5372
5373 ranges
5374}