1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` as of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
    /// A URI pointing to documentation that describes this diagnostic code.
    pub code_description: Option<lsp::Uri>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
233 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the language server when we request code actions for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer is in need of a reload.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
    /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion will have its indentation adjusted by `b - a`.
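        ///
        /// For example, if a block was copied while its first line sat at indent
        /// column 4 (`a = 4`) and auto-indent places that first line at column 8
        /// (`b = 8`), every other line of the insertion is shifted right by 4 columns.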
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text is an inlay.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
/// A runnable is a set of data about a region that could be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line with leading whitespace trimmed (unless a highlight
    /// starts within that whitespace), along with a boolean indicating whether more lines follow.
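    ///
    /// A sketch of the expected behavior (the values are illustrative, not taken
    /// from a real buffer):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more_lines) = text.first_line_preview();
    /// assert_eq!(preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more_lines);
    /// ```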
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
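    ///
    /// A minimal construction sketch (`cx` is an assumed `App` context; not a
    /// doctest):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```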
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let language_registry = language_registry.clone();
978 syntax.reparse(&text, language_registry, language);
979 }
980 BufferSnapshot {
981 text,
982 syntax,
983 file: None,
984 diagnostics: Default::default(),
985 remote_selections: Default::default(),
986 language,
987 non_text_state_update_count: 0,
988 }
989 }
990 }
991
992 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
993 let entity_id = cx.reserve_entity::<Self>().entity_id();
994 let buffer_id = entity_id.as_non_zero_u64().into();
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
997 let syntax = SyntaxMap::new(&text).snapshot();
998 BufferSnapshot {
999 text,
1000 syntax,
1001 file: None,
1002 diagnostics: Default::default(),
1003 remote_selections: Default::default(),
1004 language: None,
1005 non_text_state_update_count: 0,
1006 }
1007 }
1008
1009 #[cfg(any(test, feature = "test-support"))]
1010 pub fn build_snapshot_sync(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> BufferSnapshot {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1019 let mut syntax = SyntaxMap::new(&text).snapshot();
1020 if let Some(language) = language.clone() {
1021 syntax.reparse(&text, language_registry, language);
1022 }
1023 BufferSnapshot {
1024 text,
1025 syntax,
1026 file: None,
1027 diagnostics: Default::default(),
1028 remote_selections: Default::default(),
1029 language,
1030 non_text_state_update_count: 0,
1031 }
1032 }
1033
1034 /// Retrieve a snapshot of the buffer's current state. This is computationally
1035 /// cheap, and allows reading from the buffer on a background thread.
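    ///
    /// A sketch of the intended usage (`buffer` and `cx` are assumed to be in
    /// scope; not a doctest):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be queried without touching the Buffer entity.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```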
1036 pub fn snapshot(&self) -> BufferSnapshot {
1037 let text = self.text.snapshot();
1038 let mut syntax_map = self.syntax_map.lock();
1039 syntax_map.interpolate(&text);
1040 let syntax = syntax_map.snapshot();
1041
1042 BufferSnapshot {
1043 text,
1044 syntax,
1045 file: self.file.clone(),
1046 remote_selections: self.remote_selections.clone(),
1047 diagnostics: self.diagnostics.clone(),
1048 language: self.language.clone(),
1049 non_text_state_update_count: self.non_text_state_update_count,
1050 }
1051 }
1052
    /// Creates a new branch of this buffer, which can be edited independently
    /// and later merged back into the base buffer via [`Buffer::merge_into_base`].
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1054 let this = cx.entity();
1055 cx.new(|cx| {
1056 let mut branch = Self {
1057 branch_state: Some(BufferBranchState {
1058 base_buffer: this.clone(),
1059 merged_operations: Default::default(),
1060 }),
1061 language: self.language.clone(),
1062 has_conflict: self.has_conflict,
1063 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1064 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1065 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1066 };
1067 if let Some(language_registry) = self.language_registry() {
1068 branch.set_language_registry(language_registry);
1069 }
1070
1071 // Reparse the branch buffer so that we get syntax highlighting immediately.
1072 branch.reparse(cx);
1073
1074 branch
1075 })
1076 }
1077
    /// Computes an [`EditPreview`] for the given edits by applying them to a
    /// branch of this buffer's text and reparsing it in the background.
    pub fn preview_edits(
1079 &self,
1080 edits: Arc<[(Range<Anchor>, String)]>,
1081 cx: &App,
1082 ) -> Task<EditPreview> {
1083 let registry = self.language_registry();
1084 let language = self.language().cloned();
1085 let old_snapshot = self.text.snapshot();
1086 let mut branch_buffer = self.text.branch();
1087 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1088 cx.background_spawn(async move {
1089 if !edits.is_empty() {
1090 if let Some(language) = language.clone() {
1091 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1092 }
1093
1094 branch_buffer.edit(edits.iter().cloned());
1095 let snapshot = branch_buffer.snapshot();
1096 syntax_snapshot.interpolate(&snapshot);
1097
1098 if let Some(language) = language {
1099 syntax_snapshot.reparse(&snapshot, registry, language);
1100 }
1101 }
1102 EditPreview {
1103 old_snapshot,
1104 applied_edits_snapshot: branch_buffer.snapshot(),
1105 syntax_snapshot,
1106 }
1107 })
1108 }
1109
1110 /// Applies all of the changes in this buffer that intersect any of the
1111 /// given `ranges` to its base buffer.
1112 ///
1113 /// If `ranges` is empty, then all changes will be applied. This buffer must
1114 /// be a branch buffer to call this method.
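    ///
    /// A sketch of merging every change back into the base buffer (`branch`
    /// and `cx` are assumed bindings; not a doctest):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```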
1115 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1116 let Some(base_buffer) = self.base_buffer() else {
1117 debug_panic!("not a branch buffer");
1118 return;
1119 };
1120
1121 let mut ranges = if ranges.is_empty() {
1122 &[0..usize::MAX]
1123 } else {
1124 ranges.as_slice()
1125 }
1126 .iter()
1127 .peekable();
1128
1129 let mut edits = Vec::new();
1130 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1131 let mut is_included = false;
1132 while let Some(range) = ranges.peek() {
1133 if range.end < edit.new.start {
1134 ranges.next().unwrap();
1135 } else {
1136 if range.start <= edit.new.end {
1137 is_included = true;
1138 }
1139 break;
1140 }
1141 }
1142
1143 if is_included {
1144 edits.push((
1145 edit.old.clone(),
1146 self.text_for_range(edit.new.clone()).collect::<String>(),
1147 ));
1148 }
1149 }
1150
1151 let operation = base_buffer.update(cx, |base_buffer, cx| {
1152 // cx.emit(BufferEvent::DiffBaseChanged);
1153 base_buffer.edit(edits, None, cx)
1154 });
1155
1156 if let Some(operation) = operation
1157 && let Some(BufferBranchState {
1158 merged_operations, ..
1159 }) = &mut self.branch_state
1160 {
1161 merged_operations.push(operation);
1162 }
1163 }
1164
1165 fn on_base_buffer_event(
1166 &mut self,
1167 _: Entity<Buffer>,
1168 event: &BufferEvent,
1169 cx: &mut Context<Self>,
1170 ) {
1171 let BufferEvent::Operation { operation, .. } = event else {
1172 return;
1173 };
1174 let Some(BufferBranchState {
1175 merged_operations, ..
1176 }) = &mut self.branch_state
1177 else {
1178 return;
1179 };
1180
1181 let mut operation_to_undo = None;
1182 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1183 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1184 {
1185 merged_operations.remove(ix);
1186 operation_to_undo = Some(operation.timestamp);
1187 }
1188
1189 self.apply_ops([operation.clone()], cx);
1190
1191 if let Some(timestamp) = operation_to_undo {
1192 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1193 self.undo_operations(counts, cx);
1194 }
1195 }
1196
1197 #[cfg(test)]
1198 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1199 &self.text
1200 }
1201
1202 /// Retrieve a snapshot of the buffer's raw text, without any
1203 /// language-related state like the syntax tree or diagnostics.
1204 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1205 self.text.snapshot()
1206 }
1207
1208 /// The file associated with the buffer, if any.
1209 pub fn file(&self) -> Option<&Arc<dyn File>> {
1210 self.file.as_ref()
1211 }
1212
1213 /// The version of the buffer that was last saved or reloaded from disk.
1214 pub fn saved_version(&self) -> &clock::Global {
1215 &self.saved_version
1216 }
1217
1218 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1219 pub fn saved_mtime(&self) -> Option<MTime> {
1220 self.saved_mtime
1221 }
1222
1223 /// Assign a language to the buffer.
1224 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1225 self.non_text_state_update_count += 1;
1226 self.syntax_map.lock().clear(&self.text);
1227 self.language = language;
1228 self.was_changed();
1229 self.reparse(cx);
1230 cx.emit(BufferEvent::LanguageChanged);
1231 }
1232
1233 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1234 /// other languages if parts of the buffer are written in different languages.
1235 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1236 self.syntax_map
1237 .lock()
1238 .set_language_registry(language_registry);
1239 }
1240
1241 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1242 self.syntax_map.lock().language_registry()
1243 }
1244
1245 /// Assign the buffer a new [`Capability`].
1246 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1247 self.capability = capability;
1248 cx.emit(BufferEvent::CapabilityChanged)
1249 }
1250
1251 /// This method is called to signal that the buffer has been saved.
1252 pub fn did_save(
1253 &mut self,
1254 version: clock::Global,
1255 mtime: Option<MTime>,
1256 cx: &mut Context<Self>,
1257 ) {
1258 self.saved_version = version;
1259 self.has_unsaved_edits
1260 .set((self.saved_version().clone(), false));
1261 self.has_conflict = false;
1262 self.saved_mtime = mtime;
1263 self.was_changed();
1264 cx.emit(BufferEvent::Saved);
1265 cx.notify();
1266 }
1267
1268 /// This method is called to signal that the buffer has been discarded.
1269 pub fn discarded(&self, cx: &mut Context<Self>) {
1270 cx.emit(BufferEvent::Discarded);
1271 cx.notify();
1272 }
1273
1274 /// Reloads the contents of the buffer from disk.
1275 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1276 let (tx, rx) = futures::channel::oneshot::channel();
1277 let prev_version = self.text.version();
1278 self.reload_task = Some(cx.spawn(async move |this, cx| {
1279 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1280 let file = this.file.as_ref()?.as_local()?;
1281
1282 Some((file.disk_state().mtime(), file.load(cx)))
1283 })?
1284 else {
1285 return Ok(());
1286 };
1287
1288 let new_text = new_text.await?;
1289 let diff = this
1290 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1291 .await;
1292 this.update(cx, |this, cx| {
1293 if this.version() == diff.base_version {
1294 this.finalize_last_transaction();
1295 this.apply_diff(diff, cx);
1296 tx.send(this.finalize_last_transaction().cloned()).ok();
1297 this.has_conflict = false;
1298 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1299 } else {
1300 if !diff.edits.is_empty()
1301 || this
1302 .edits_since::<usize>(&diff.base_version)
1303 .next()
1304 .is_some()
1305 {
1306 this.has_conflict = true;
1307 }
1308
1309 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1310 }
1311
1312 this.reload_task.take();
1313 })
1314 }));
1315 rx
1316 }
1317
1318 /// This method is called to signal that the buffer has been reloaded.
1319 pub fn did_reload(
1320 &mut self,
1321 version: clock::Global,
1322 line_ending: LineEnding,
1323 mtime: Option<MTime>,
1324 cx: &mut Context<Self>,
1325 ) {
1326 self.saved_version = version;
1327 self.has_unsaved_edits
1328 .set((self.saved_version.clone(), false));
1329 self.text.set_line_ending(line_ending);
1330 self.saved_mtime = mtime;
1331 cx.emit(BufferEvent::Reloaded);
1332 cx.notify();
1333 }
1334
1335 /// Updates the [`File`] backing this buffer. This should be called when
1336 /// the file has changed or has been deleted.
1337 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1338 let was_dirty = self.is_dirty();
1339 let mut file_changed = false;
1340
1341 if let Some(old_file) = self.file.as_ref() {
1342 if new_file.path() != old_file.path() {
1343 file_changed = true;
1344 }
1345
1346 let old_state = old_file.disk_state();
1347 let new_state = new_file.disk_state();
1348 if old_state != new_state {
1349 file_changed = true;
1350 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1351 cx.emit(BufferEvent::ReloadNeeded)
1352 }
1353 }
1354 } else {
1355 file_changed = true;
1356 };
1357
1358 self.file = Some(new_file);
1359 if file_changed {
1360 self.was_changed();
1361 self.non_text_state_update_count += 1;
1362 if was_dirty != self.is_dirty() {
1363 cx.emit(BufferEvent::DirtyChanged);
1364 }
1365 cx.emit(BufferEvent::FileHandleChanged);
1366 cx.notify();
1367 }
1368 }
1369
1370 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1371 Some(self.branch_state.as_ref()?.base_buffer.clone())
1372 }
1373
1374 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1375 pub fn language(&self) -> Option<&Arc<Language>> {
1376 self.language.as_ref()
1377 }
1378
1379 /// Returns the [`Language`] at the given location.
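    ///
    /// For buffers with injected languages (for example a Markdown buffer containing
    /// a Rust code block), this may differ from [`Buffer::language`]. A sketch, with
    /// `buffer`, `cx`, and `cursor_offset` assumed to be in scope:
    ///
    /// ```ignore
    /// let language_name = buffer
    ///     .read(cx)
    ///     .language_at(cursor_offset)
    ///     .map(|language| language.name());
    /// ```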
1380 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1381 let offset = position.to_offset(self);
1382 let mut is_first = true;
1383 let start_anchor = self.anchor_before(offset);
1384 let end_anchor = self.anchor_after(offset);
1385 self.syntax_map
1386 .lock()
1387 .layers_for_range(offset..offset, &self.text, false)
1388 .filter(|layer| {
1389 if is_first {
1390 is_first = false;
1391 return true;
1392 }
1393
1394 layer
1395 .included_sub_ranges
1396 .map(|sub_ranges| {
1397 sub_ranges.iter().any(|sub_range| {
1398 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1399 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1400 !is_before_start && !is_after_end
1401 })
1402 })
1403 .unwrap_or(true)
1404 })
1405 .last()
1406 .map(|info| info.language.clone())
1407 .or_else(|| self.language.clone())
1408 }
1409
1410 /// Returns each [`Language`] for the active syntax layers at the given location.
1411 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1412 let offset = position.to_offset(self);
1413 let mut languages: Vec<Arc<Language>> = self
1414 .syntax_map
1415 .lock()
1416 .layers_for_range(offset..offset, &self.text, false)
1417 .map(|info| info.language.clone())
1418 .collect();
1419
1420 if languages.is_empty()
1421 && let Some(buffer_language) = self.language()
1422 {
1423 languages.push(buffer_language.clone());
1424 }
1425
1426 languages
1427 }
1428
1429 /// An integer version number that accounts for all updates besides
1430 /// the buffer's text itself (which is versioned via a version vector).
1431 pub fn non_text_state_update_count(&self) -> usize {
1432 self.non_text_state_update_count
1433 }
1434
1435 /// Whether the buffer is being parsed in the background.
1436 #[cfg(any(test, feature = "test-support"))]
1437 pub fn is_parsing(&self) -> bool {
1438 self.reparse.is_some()
1439 }
1440
1441 /// Indicates whether the buffer contains any regions that may be
1442 /// written in a language that hasn't been loaded yet.
1443 pub fn contains_unknown_injections(&self) -> bool {
1444 self.syntax_map.lock().contains_unknown_injections()
1445 }
1446
1447 #[cfg(any(test, feature = "test-support"))]
1448 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1449 self.sync_parse_timeout = timeout;
1450 }
1451
1452 /// Called after an edit to synchronize the buffer's main parse tree with
1453 /// the buffer's new underlying state.
1454 ///
1455 /// Locks the syntax map and interpolates the edits since the last reparse
1456 /// into the foreground syntax tree.
1457 ///
1458 /// Then takes a stable snapshot of the syntax map before unlocking it.
1459 /// The snapshot with the interpolated edits is sent to a background thread,
1460 /// where we ask Tree-sitter to perform an incremental parse.
1461 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call
    /// back into the main thread and assign the result as the buffer's parse state.
1470 ///
1471 /// If the buffer or grammar changed since the start of the background parse,
1472 /// initiate an additional reparse recursively. To avoid concurrent parses
1473 /// for the same buffer, we only initiate a new parse if we are not already
1474 /// parsing in the background.
1475 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1476 if self.reparse.is_some() {
1477 return;
1478 }
1479 let language = if let Some(language) = self.language.clone() {
1480 language
1481 } else {
1482 return;
1483 };
1484
1485 let text = self.text_snapshot();
1486 let parsed_version = self.version();
1487
1488 let mut syntax_map = self.syntax_map.lock();
1489 syntax_map.interpolate(&text);
1490 let language_registry = syntax_map.language_registry();
1491 let mut syntax_snapshot = syntax_map.snapshot();
1492 drop(syntax_map);
1493
1494 let parse_task = cx.background_spawn({
1495 let language = language.clone();
1496 let language_registry = language_registry.clone();
1497 async move {
1498 syntax_snapshot.reparse(&text, language_registry, language);
1499 syntax_snapshot
1500 }
1501 });
1502
1503 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1504 match cx
1505 .background_executor()
1506 .block_with_timeout(self.sync_parse_timeout, parse_task)
1507 {
1508 Ok(new_syntax_snapshot) => {
1509 self.did_finish_parsing(new_syntax_snapshot, cx);
1510 self.reparse = None;
1511 }
1512 Err(parse_task) => {
1513 self.reparse = Some(cx.spawn(async move |this, cx| {
1514 let new_syntax_map = parse_task.await;
1515 this.update(cx, move |this, cx| {
1516 let grammar_changed =
1517 this.language.as_ref().is_none_or(|current_language| {
1518 !Arc::ptr_eq(&language, current_language)
1519 });
1520 let language_registry_changed = new_syntax_map
1521 .contains_unknown_injections()
1522 && language_registry.is_some_and(|registry| {
1523 registry.version() != new_syntax_map.language_registry_version()
1524 });
1525 let parse_again = language_registry_changed
1526 || grammar_changed
1527 || this.version.changed_since(&parsed_version);
1528 this.did_finish_parsing(new_syntax_map, cx);
1529 this.reparse = None;
1530 if parse_again {
1531 this.reparse(cx);
1532 }
1533 })
1534 .ok();
1535 }));
1536 }
1537 }
1538 }
1539
1540 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1541 self.was_changed();
1542 self.non_text_state_update_count += 1;
1543 self.syntax_map.lock().did_parse(syntax_snapshot);
1544 self.request_autoindent(cx);
1545 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1546 cx.emit(BufferEvent::Reparsed);
1547 cx.notify();
1548 }
1549
1550 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1551 self.parse_status.1.clone()
1552 }
1553
1554 /// Assign to the buffer a set of diagnostics created by a given language server.
1555 pub fn update_diagnostics(
1556 &mut self,
1557 server_id: LanguageServerId,
1558 diagnostics: DiagnosticSet,
1559 cx: &mut Context<Self>,
1560 ) {
1561 let lamport_timestamp = self.text.lamport_clock.tick();
1562 let op = Operation::UpdateDiagnostics {
1563 server_id,
1564 diagnostics: diagnostics.iter().cloned().collect(),
1565 lamport_timestamp,
1566 };
1567
1568 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1569 self.send_operation(op, true, cx);
1570 }
1571
    /// Returns the diagnostics for this buffer, either for a single language
    /// server or, when `for_server` is `None`, for all language servers.
    pub fn buffer_diagnostics(
1573 &self,
1574 for_server: Option<LanguageServerId>,
1575 ) -> Vec<&DiagnosticEntry<Anchor>> {
1576 match for_server {
1577 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1578 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1579 Err(_) => Vec::new(),
1580 },
1581 None => self
1582 .diagnostics
1583 .iter()
1584 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1585 .collect(),
1586 }
1587 }
1588
1589 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1590 if let Some(indent_sizes) = self.compute_autoindents() {
1591 let indent_sizes = cx.background_spawn(indent_sizes);
1592 match cx
1593 .background_executor()
1594 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1595 {
1596 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1597 Err(indent_sizes) => {
1598 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1599 let indent_sizes = indent_sizes.await;
1600 this.update(cx, |this, cx| {
1601 this.apply_autoindents(indent_sizes, cx);
1602 })
1603 .ok();
1604 }));
1605 }
1606 }
1607 } else {
1608 self.autoindent_requests.clear();
1609 for tx in self.wait_for_autoindent_txs.drain(..) {
1610 tx.send(()).ok();
1611 }
1612 }
1613 }
1614
1615 fn compute_autoindents(
1616 &self,
1617 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1618 let max_rows_between_yields = 100;
1619 let snapshot = self.snapshot();
1620 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1621 return None;
1622 }
1623
1624 let autoindent_requests = self.autoindent_requests.clone();
1625 Some(async move {
1626 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1627 for request in autoindent_requests {
1628 // Resolve each edited range to its row in the current buffer and in the
1629 // buffer before this batch of edits.
1630 let mut row_ranges = Vec::new();
1631 let mut old_to_new_rows = BTreeMap::new();
1632 let mut language_indent_sizes_by_new_row = Vec::new();
1633 for entry in &request.entries {
1634 let position = entry.range.start;
1635 let new_row = position.to_point(&snapshot).row;
1636 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1637 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1638
1639 if !entry.first_line_is_new {
1640 let old_row = position.to_point(&request.before_edit).row;
1641 old_to_new_rows.insert(old_row, new_row);
1642 }
1643 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1644 }
1645
1646 // Build a map containing the suggested indentation for each of the edited lines
1647 // with respect to the state of the buffer before these edits. This map is keyed
1648 // by the rows for these lines in the current state of the buffer.
1649 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1650 let old_edited_ranges =
1651 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1652 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1653 let mut language_indent_size = IndentSize::default();
1654 for old_edited_range in old_edited_ranges {
1655 let suggestions = request
1656 .before_edit
1657 .suggest_autoindents(old_edited_range.clone())
1658 .into_iter()
1659 .flatten();
1660 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1661 if let Some(suggestion) = suggestion {
1662 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1663
1664 // Find the indent size based on the language for this row.
1665 while let Some((row, size)) = language_indent_sizes.peek() {
1666 if *row > new_row {
1667 break;
1668 }
1669 language_indent_size = *size;
1670 language_indent_sizes.next();
1671 }
1672
1673 let suggested_indent = old_to_new_rows
1674 .get(&suggestion.basis_row)
1675 .and_then(|from_row| {
1676 Some(old_suggestions.get(from_row).copied()?.0)
1677 })
1678 .unwrap_or_else(|| {
1679 request
1680 .before_edit
1681 .indent_size_for_line(suggestion.basis_row)
1682 })
1683 .with_delta(suggestion.delta, language_indent_size);
1684 old_suggestions
1685 .insert(new_row, (suggested_indent, suggestion.within_error));
1686 }
1687 }
1688 yield_now().await;
1689 }
1690
1691 // Compute new suggestions for each line, but only include them in the result
1692 // if they differ from the old suggestion for that line.
1693 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1694 let mut language_indent_size = IndentSize::default();
1695 for (row_range, original_indent_column) in row_ranges {
1696 let new_edited_row_range = if request.is_block_mode {
1697 row_range.start..row_range.start + 1
1698 } else {
1699 row_range.clone()
1700 };
1701
1702 let suggestions = snapshot
1703 .suggest_autoindents(new_edited_row_range.clone())
1704 .into_iter()
1705 .flatten();
1706 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1707 if let Some(suggestion) = suggestion {
1708 // Find the indent size based on the language for this row.
1709 while let Some((row, size)) = language_indent_sizes.peek() {
1710 if *row > new_row {
1711 break;
1712 }
1713 language_indent_size = *size;
1714 language_indent_sizes.next();
1715 }
1716
1717 let suggested_indent = indent_sizes
1718 .get(&suggestion.basis_row)
1719 .copied()
1720 .map(|e| e.0)
1721 .unwrap_or_else(|| {
1722 snapshot.indent_size_for_line(suggestion.basis_row)
1723 })
1724 .with_delta(suggestion.delta, language_indent_size);
1725
1726 if old_suggestions.get(&new_row).is_none_or(
1727 |(old_indentation, was_within_error)| {
1728 suggested_indent != *old_indentation
1729 && (!suggestion.within_error || *was_within_error)
1730 },
1731 ) {
1732 indent_sizes.insert(
1733 new_row,
1734 (suggested_indent, request.ignore_empty_lines),
1735 );
1736 }
1737 }
1738 }
1739
1740 if let (true, Some(original_indent_column)) =
1741 (request.is_block_mode, original_indent_column)
1742 {
1743 let new_indent =
1744 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1745 *indent
1746 } else {
1747 snapshot.indent_size_for_line(row_range.start)
1748 };
1749 let delta = new_indent.len as i64 - original_indent_column as i64;
1750 if delta != 0 {
1751 for row in row_range.skip(1) {
1752 indent_sizes.entry(row).or_insert_with(|| {
1753 let mut size = snapshot.indent_size_for_line(row);
1754 if size.kind == new_indent.kind {
1755 match delta.cmp(&0) {
1756 Ordering::Greater => size.len += delta as u32,
1757 Ordering::Less => {
1758 size.len = size.len.saturating_sub(-delta as u32)
1759 }
1760 Ordering::Equal => {}
1761 }
1762 }
1763 (size, request.ignore_empty_lines)
1764 });
1765 }
1766 }
1767 }
1768
1769 yield_now().await;
1770 }
1771 }
1772
1773 indent_sizes
1774 .into_iter()
1775 .filter_map(|(row, (indent, ignore_empty_lines))| {
1776 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1777 None
1778 } else {
1779 Some((row, indent))
1780 }
1781 })
1782 .collect()
1783 })
1784 }
1785
1786 fn apply_autoindents(
1787 &mut self,
1788 indent_sizes: BTreeMap<u32, IndentSize>,
1789 cx: &mut Context<Self>,
1790 ) {
1791 self.autoindent_requests.clear();
1792 for tx in self.wait_for_autoindent_txs.drain(..) {
1793 tx.send(()).ok();
1794 }
1795
1796 let edits: Vec<_> = indent_sizes
1797 .into_iter()
1798 .filter_map(|(row, indent_size)| {
1799 let current_size = indent_size_for_line(self, row);
1800 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1801 })
1802 .collect();
1803
1804 let preserve_preview = self.preserve_preview();
1805 self.edit(edits, None, cx);
1806 if preserve_preview {
1807 self.refresh_preview();
1808 }
1809 }
1810
1811 /// Create a minimal edit that will cause the given row to be indented
1812 /// with the given size. After applying this edit, the length of the line
1813 /// will always be at least `new_size.len`.
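///
/// # Example
///
/// A sketch of the intended behavior (illustrative, not a compiled doctest;
/// `IndentSize::spaces` is used as it appears elsewhere in this crate):
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces inserts two spaces at the start of row 3.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
/// ```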
1814 pub fn edit_for_indent_size_adjustment(
1815 row: u32,
1816 current_size: IndentSize,
1817 new_size: IndentSize,
1818 ) -> Option<(Range<Point>, String)> {
1819 if new_size.kind == current_size.kind {
1820 match new_size.len.cmp(&current_size.len) {
1821 Ordering::Greater => {
1822 let point = Point::new(row, 0);
1823 Some((
1824 point..point,
1825 iter::repeat(new_size.char())
1826 .take((new_size.len - current_size.len) as usize)
1827 .collect::<String>(),
1828 ))
1829 }
1830
1831 Ordering::Less => Some((
1832 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1833 String::new(),
1834 )),
1835
1836 Ordering::Equal => None,
1837 }
1838 } else {
1839 Some((
1840 Point::new(row, 0)..Point::new(row, current_size.len),
1841 iter::repeat(new_size.char())
1842 .take(new_size.len as usize)
1843 .collect::<String>(),
1844 ))
1845 }
1846 }
1847
1848 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1849 /// and the given new text.
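///
/// # Example
///
/// A sketch of typical usage (illustrative only; assumes an async GPUI context
/// and an `Entity<Buffer>` named `buffer`):
///
/// ```ignore
/// let diff = buffer.read_with(cx, |buffer, cx| buffer.diff("new text\n".to_string(), cx));
/// let diff = diff.await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```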
1850 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1851 let old_text = self.as_rope().clone();
1852 let base_version = self.version();
1853 cx.background_executor()
1854 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1855 let old_text = old_text.to_string();
1856 let line_ending = LineEnding::detect(&new_text);
1857 LineEnding::normalize(&mut new_text);
1858 let edits = text_diff(&old_text, &new_text);
1859 Diff {
1860 base_version,
1861 line_ending,
1862 edits,
1863 }
1864 })
1865 }
1866
1867 /// Spawns a background task that searches the buffer for any whitespace
1868 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1869 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1870 let old_text = self.as_rope().clone();
1871 let line_ending = self.line_ending();
1872 let base_version = self.version();
1873 cx.background_spawn(async move {
1874 let ranges = trailing_whitespace_ranges(&old_text);
1875 let empty = Arc::<str>::from("");
1876 Diff {
1877 base_version,
1878 line_ending,
1879 edits: ranges
1880 .into_iter()
1881 .map(|range| (range, empty.clone()))
1882 .collect(),
1883 }
1884 })
1885 }
1886
1887 /// Ensures that the buffer ends with a single newline character, and
1888 /// no other whitespace. Skips if the buffer is empty.
1889 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1890 let len = self.len();
1891 if len == 0 {
1892 return;
1893 }
1894 let mut offset = len;
1895 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1896 let non_whitespace_len = chunk
1897 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1898 .len();
1899 offset -= chunk.len();
1900 offset += non_whitespace_len;
1901 if non_whitespace_len != 0 {
1902 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1903 return;
1904 }
1905 break;
1906 }
1907 }
1908 self.edit([(offset..len, "\n")], None, cx);
1909 }
1910
1911 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1912 /// calculated, then adjust the diff to account for those changes, and discard any
1913 /// parts of the diff that conflict with those changes.
1914 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1915 let snapshot = self.snapshot();
1916 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1917 let mut delta = 0;
1918 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1919 while let Some(edit_since) = edits_since.peek() {
1920 // If the edit occurs after a diff hunk, then it does not
1921 // affect that hunk.
1922 if edit_since.old.start > range.end {
1923 break;
1924 }
1925 // If the edit precedes the diff hunk, then adjust the hunk
1926 // to reflect the edit.
1927 else if edit_since.old.end < range.start {
1928 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1929 edits_since.next();
1930 }
1931 // If the edit intersects a diff hunk, then discard that hunk.
1932 else {
1933 return None;
1934 }
1935 }
1936
1937 let start = (range.start as i64 + delta) as usize;
1938 let end = (range.end as i64 + delta) as usize;
1939 Some((start..end, new_text))
1940 });
1941
1942 self.start_transaction();
1943 self.text.set_line_ending(diff.line_ending);
1944 self.edit(adjusted_edits, None, cx);
1945 self.end_transaction(cx)
1946 }
1947
1948 fn has_unsaved_edits(&self) -> bool {
1949 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1950
1951 if last_version == self.version {
1952 self.has_unsaved_edits
1953 .set((last_version, has_unsaved_edits));
1954 return has_unsaved_edits;
1955 }
1956
1957 let has_edits = self.has_edits_since(&self.saved_version);
1958 self.has_unsaved_edits
1959 .set((self.version.clone(), has_edits));
1960 has_edits
1961 }
1962
1963 /// Checks if the buffer has unsaved changes.
1964 pub fn is_dirty(&self) -> bool {
1965 if self.capability == Capability::ReadOnly {
1966 return false;
1967 }
1968 if self.has_conflict {
1969 return true;
1970 }
1971 match self.file.as_ref().map(|f| f.disk_state()) {
1972 Some(DiskState::New) | Some(DiskState::Deleted) => {
1973 !self.is_empty() && self.has_unsaved_edits()
1974 }
1975 _ => self.has_unsaved_edits(),
1976 }
1977 }
1978
1979 /// Checks if the buffer and its file have both changed since the buffer
1980 /// was last saved or reloaded.
1981 pub fn has_conflict(&self) -> bool {
1982 if self.has_conflict {
1983 return true;
1984 }
1985 let Some(file) = self.file.as_ref() else {
1986 return false;
1987 };
1988 match file.disk_state() {
1989 DiskState::New => false,
1990 DiskState::Present { mtime } => match self.saved_mtime {
1991 Some(saved_mtime) => {
1992 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1993 }
1994 None => true,
1995 },
1996 DiskState::Deleted => false,
1997 }
1998 }
1999
2000 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2001 pub fn subscribe(&mut self) -> Subscription {
2002 self.text.subscribe()
2003 }
2004
2005 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2006 ///
2007 /// This allows downstream code to check if the buffer's text has changed without
2008 /// waiting for an effect cycle, which would be required if using events.
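///
/// # Example
///
/// A sketch of the intended pattern (illustrative only; the `changed` flag is a
/// hypothetical local):
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ... later, without waiting for an event:
/// if changed.get() {
///     // react to the text change
/// }
/// ```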
2009 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2010 if let Err(ix) = self
2011 .change_bits
2012 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2013 {
2014 self.change_bits.insert(ix, bit);
2015 }
2016 }
2017
2018 fn was_changed(&mut self) {
2019 self.change_bits.retain(|change_bit| {
2020 change_bit.upgrade().is_some_and(|bit| {
2021 bit.replace(true);
2022 true
2023 })
2024 });
2025 }
2026
2027 /// Starts a transaction, if one is not already in-progress. When undoing or
2028 /// redoing edits, all of the edits performed within a transaction are undone
2029 /// or redone together.
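///
/// # Example
///
/// A sketch of grouping two edits into a single undo step (illustrative only;
/// assumes a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// buffer.end_transaction(cx);
/// // A single call to `undo` now reverts both edits.
/// ```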
2030 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2031 self.start_transaction_at(Instant::now())
2032 }
2033
2034 /// Starts a transaction, providing the current time. Subsequent transactions
2035 /// that occur within a short period of time will be grouped together. This
2036 /// is controlled by the buffer's undo grouping duration.
2037 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2038 self.transaction_depth += 1;
2039 if self.was_dirty_before_starting_transaction.is_none() {
2040 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2041 }
2042 self.text.start_transaction_at(now)
2043 }
2044
2045 /// Terminates the current transaction, if this is the outermost transaction.
2046 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2047 self.end_transaction_at(Instant::now(), cx)
2048 }
2049
2050 /// Terminates the current transaction, providing the current time. Subsequent transactions
2051 /// that occur within a short period of time will be grouped together. This
2052 /// is controlled by the buffer's undo grouping duration.
2053 pub fn end_transaction_at(
2054 &mut self,
2055 now: Instant,
2056 cx: &mut Context<Self>,
2057 ) -> Option<TransactionId> {
2058 assert!(self.transaction_depth > 0);
2059 self.transaction_depth -= 1;
2060 let was_dirty = if self.transaction_depth == 0 {
2061 self.was_dirty_before_starting_transaction.take().unwrap()
2062 } else {
2063 false
2064 };
2065 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2066 self.did_edit(&start_version, was_dirty, cx);
2067 Some(transaction_id)
2068 } else {
2069 None
2070 }
2071 }
2072
2073 /// Manually add a transaction to the buffer's undo history.
2074 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2075 self.text.push_transaction(transaction, now);
2076 }
2077
2078 /// Differs from `push_transaction` in that it does not clear the redo
2079 /// stack. Intended to be used to create a parent transaction to merge
2080 /// potential child transactions into.
2081 ///
2082 /// The caller is responsible for removing it from the undo history using
2083 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2084 /// are merged into this transaction, the caller is responsible for ensuring
2085 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2086 /// cleared is to create transactions with the usual `start_transaction` and
2087 /// `end_transaction` methods and merge the resulting transactions into
2088 /// the transaction created by this method.
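///
/// # Example
///
/// A sketch of the merge pattern described above (illustrative only):
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// buffer.start_transaction();
/// buffer.edit([(0..0, "edited")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     buffer.forget_transaction(parent);
/// }
/// ```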
2089 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2090 self.text.push_empty_transaction(now)
2091 }
2092
2093 /// Prevent the last transaction from being grouped with any subsequent transactions,
2094 /// even if they occur within the buffer's undo grouping duration.
2095 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2096 self.text.finalize_last_transaction()
2097 }
2098
2099 /// Manually group all changes since a given transaction.
2100 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2101 self.text.group_until_transaction(transaction_id);
2102 }
2103
2104 /// Manually remove a transaction from the buffer's undo history.
2105 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2106 self.text.forget_transaction(transaction_id)
2107 }
2108
2109 /// Retrieve a transaction from the buffer's undo history.
2110 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2111 self.text.get_transaction(transaction_id)
2112 }
2113
2114 /// Manually merge two transactions in the buffer's undo history.
2115 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2116 self.text.merge_transactions(transaction, destination);
2117 }
2118
2119 /// Waits for the buffer to receive operations with the given timestamps.
2120 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2121 &mut self,
2122 edit_ids: It,
2123 ) -> impl Future<Output = Result<()>> + use<It> {
2124 self.text.wait_for_edits(edit_ids)
2125 }
2126
2127 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2128 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2129 &mut self,
2130 anchors: It,
2131 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2132 self.text.wait_for_anchors(anchors)
2133 }
2134
2135 /// Waits for the buffer to receive operations up to the given version.
2136 pub fn wait_for_version(
2137 &mut self,
2138 version: clock::Global,
2139 ) -> impl Future<Output = Result<()>> + use<> {
2140 self.text.wait_for_version(version)
2141 }
2142
2143 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2144 /// [`Buffer::wait_for_version`] to resolve with an error.
2145 pub fn give_up_waiting(&mut self) {
2146 self.text.give_up_waiting();
2147 }
2148
2149 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2150 let mut rx = None;
2151 if !self.autoindent_requests.is_empty() {
2152 let channel = oneshot::channel();
2153 self.wait_for_autoindent_txs.push(channel.0);
2154 rx = Some(channel.1);
2155 }
2156 rx
2157 }
2158
2159 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2160 pub fn set_active_selections(
2161 &mut self,
2162 selections: Arc<[Selection<Anchor>]>,
2163 line_mode: bool,
2164 cursor_shape: CursorShape,
2165 cx: &mut Context<Self>,
2166 ) {
2167 let lamport_timestamp = self.text.lamport_clock.tick();
2168 self.remote_selections.insert(
2169 self.text.replica_id(),
2170 SelectionSet {
2171 selections: selections.clone(),
2172 lamport_timestamp,
2173 line_mode,
2174 cursor_shape,
2175 },
2176 );
2177 self.send_operation(
2178 Operation::UpdateSelections {
2179 selections,
2180 line_mode,
2181 lamport_timestamp,
2182 cursor_shape,
2183 },
2184 true,
2185 cx,
2186 );
2187 self.non_text_state_update_count += 1;
2188 cx.notify();
2189 }
2190
2191 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2192 /// this replica.
2193 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2194 if self
2195 .remote_selections
2196 .get(&self.text.replica_id())
2197 .is_none_or(|set| !set.selections.is_empty())
2198 {
2199 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2200 }
2201 }
2202
2203 pub fn set_agent_selections(
2204 &mut self,
2205 selections: Arc<[Selection<Anchor>]>,
2206 line_mode: bool,
2207 cursor_shape: CursorShape,
2208 cx: &mut Context<Self>,
2209 ) {
2210 let lamport_timestamp = self.text.lamport_clock.tick();
2211 self.remote_selections.insert(
2212 AGENT_REPLICA_ID,
2213 SelectionSet {
2214 selections,
2215 lamport_timestamp,
2216 line_mode,
2217 cursor_shape,
2218 },
2219 );
2220 self.non_text_state_update_count += 1;
2221 cx.notify();
2222 }
2223
2224 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2225 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2226 }
2227
2228 /// Replaces the buffer's entire text.
2229 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2230 where
2231 T: Into<Arc<str>>,
2232 {
2233 self.autoindent_requests.clear();
2234 self.edit([(0..self.len(), text)], None, cx)
2235 }
2236
2237 /// Appends the given text to the end of the buffer.
2238 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2239 where
2240 T: Into<Arc<str>>,
2241 {
2242 self.edit([(self.len()..self.len(), text)], None, cx)
2243 }
2244
2245 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2246 /// delete, and a string of text to insert at that location.
2247 ///
2248 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2249 /// request for the edited ranges, which will be processed when the buffer finishes
2250 /// parsing.
2251 ///
2252 /// Parsing takes place at the end of a transaction, and may happen synchronously
2253 /// or asynchronously, depending on the changes.
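///
/// # Example
///
/// A sketch of a plain edit and an auto-indented edit (illustrative only;
/// assumes a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// // Insert text at the start of the buffer without auto-indentation.
/// buffer.edit([(0..0, "if x {\n")], None, cx);
/// // Insert a newline and let the language's indent rules choose the column.
/// buffer.edit(
///     [(buffer.len()..buffer.len(), "\ndo_thing()")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```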
2254 pub fn edit<I, S, T>(
2255 &mut self,
2256 edits_iter: I,
2257 autoindent_mode: Option<AutoindentMode>,
2258 cx: &mut Context<Self>,
2259 ) -> Option<clock::Lamport>
2260 where
2261 I: IntoIterator<Item = (Range<S>, T)>,
2262 S: ToOffset,
2263 T: Into<Arc<str>>,
2264 {
2265 // Skip invalid edits and coalesce contiguous ones.
2266 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2267
2268 for (range, new_text) in edits_iter {
2269 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2270
2271 if range.start > range.end {
2272 mem::swap(&mut range.start, &mut range.end);
2273 }
2274 let new_text = new_text.into();
2275 if !new_text.is_empty() || !range.is_empty() {
2276 if let Some((prev_range, prev_text)) = edits.last_mut()
2277 && prev_range.end >= range.start
2278 {
2279 prev_range.end = cmp::max(prev_range.end, range.end);
2280 *prev_text = format!("{prev_text}{new_text}").into();
2281 } else {
2282 edits.push((range, new_text));
2283 }
2284 }
2285 }
2286 if edits.is_empty() {
2287 return None;
2288 }
2289
2290 self.start_transaction();
2291 self.pending_autoindent.take();
2292 let autoindent_request = autoindent_mode
2293 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2294
2295 let edit_operation = self.text.edit(edits.iter().cloned());
2296 let edit_id = edit_operation.timestamp();
2297
2298 if let Some((before_edit, mode)) = autoindent_request {
2299 let mut delta = 0isize;
2300 let mut previous_setting = None;
2301 let entries: Vec<_> = edits
2302 .into_iter()
2303 .enumerate()
2304 .zip(&edit_operation.as_edit().unwrap().new_text)
2305 .filter(|((_, (range, _)), _)| {
2306 let language = before_edit.language_at(range.start);
2307 let language_id = language.map(|l| l.id());
2308 if let Some((cached_language_id, auto_indent)) = previous_setting
2309 && cached_language_id == language_id
2310 {
2311 auto_indent
2312 } else {
2313 // The auto-indent setting is not present in editorconfigs, hence
2314 // we can avoid passing the file here.
2315 let auto_indent =
2316 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2317 previous_setting = Some((language_id, auto_indent));
2318 auto_indent
2319 }
2320 })
2321 .map(|((ix, (range, _)), new_text)| {
2322 let new_text_length = new_text.len();
2323 let old_start = range.start.to_point(&before_edit);
2324 let new_start = (delta + range.start as isize) as usize;
2325 let range_len = range.end - range.start;
2326 delta += new_text_length as isize - range_len as isize;
2327
2328 // Decide what range of the insertion to auto-indent, and whether
2329 // the first line of the insertion should be considered a newly-inserted line
2330 // or an edit to an existing line.
2331 let mut range_of_insertion_to_indent = 0..new_text_length;
2332 let mut first_line_is_new = true;
2333
2334 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2335 let old_line_end = before_edit.line_len(old_start.row);
2336
2337 if old_start.column > old_line_start {
2338 first_line_is_new = false;
2339 }
2340
2341 if !new_text.contains('\n')
2342 && (old_start.column + (range_len as u32) < old_line_end
2343 || old_line_end == old_line_start)
2344 {
2345 first_line_is_new = false;
2346 }
2347
2348 // When inserting text starting with a newline, avoid auto-indenting the
2349 // previous line.
2350 if new_text.starts_with('\n') {
2351 range_of_insertion_to_indent.start += 1;
2352 first_line_is_new = true;
2353 }
2354
2355 let mut original_indent_column = None;
2356 if let AutoindentMode::Block {
2357 original_indent_columns,
2358 } = &mode
2359 {
2360 original_indent_column = Some(if new_text.starts_with('\n') {
2361 indent_size_for_text(
2362 new_text[range_of_insertion_to_indent.clone()].chars(),
2363 )
2364 .len
2365 } else {
2366 original_indent_columns
2367 .get(ix)
2368 .copied()
2369 .flatten()
2370 .unwrap_or_else(|| {
2371 indent_size_for_text(
2372 new_text[range_of_insertion_to_indent.clone()].chars(),
2373 )
2374 .len
2375 })
2376 });
2377
2378 // Avoid auto-indenting the line after the edit.
2379 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2380 range_of_insertion_to_indent.end -= 1;
2381 }
2382 }
2383
2384 AutoindentRequestEntry {
2385 first_line_is_new,
2386 original_indent_column,
2387 indent_size: before_edit.language_indent_size_at(range.start, cx),
2388 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2389 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2390 }
2391 })
2392 .collect();
2393
2394 if !entries.is_empty() {
2395 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2396 before_edit,
2397 entries,
2398 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2399 ignore_empty_lines: false,
2400 }));
2401 }
2402 }
2403
2404 self.end_transaction(cx);
2405 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2406 Some(edit_id)
2407 }
2408
2409 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2410 self.was_changed();
2411
2412 if self.edits_since::<usize>(old_version).next().is_none() {
2413 return;
2414 }
2415
2416 self.reparse(cx);
2417 cx.emit(BufferEvent::Edited);
2418 if was_dirty != self.is_dirty() {
2419 cx.emit(BufferEvent::DirtyChanged);
2420 }
2421 cx.notify();
2422 }
2423
2424 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2425 where
2426 I: IntoIterator<Item = Range<T>>,
2427 T: ToOffset + Copy,
2428 {
2429 let before_edit = self.snapshot();
2430 let entries = ranges
2431 .into_iter()
2432 .map(|range| AutoindentRequestEntry {
2433 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2434 first_line_is_new: true,
2435 indent_size: before_edit.language_indent_size_at(range.start, cx),
2436 original_indent_column: None,
2437 })
2438 .collect();
2439 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2440 before_edit,
2441 entries,
2442 is_block_mode: false,
2443 ignore_empty_lines: true,
2444 }));
2445 self.request_autoindent(cx);
2446 }
2447
2448 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2449 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2450 pub fn insert_empty_line(
2451 &mut self,
2452 position: impl ToPoint,
2453 space_above: bool,
2454 space_below: bool,
2455 cx: &mut Context<Self>,
2456 ) -> Point {
2457 let mut position = position.to_point(self);
2458
2459 self.start_transaction();
2460
2461 self.edit(
2462 [(position..position, "\n")],
2463 Some(AutoindentMode::EachLine),
2464 cx,
2465 );
2466
2467 if position.column > 0 {
2468 position += Point::new(1, 0);
2469 }
2470
2471 if !self.is_line_blank(position.row) {
2472 self.edit(
2473 [(position..position, "\n")],
2474 Some(AutoindentMode::EachLine),
2475 cx,
2476 );
2477 }
2478
2479 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2480 self.edit(
2481 [(position..position, "\n")],
2482 Some(AutoindentMode::EachLine),
2483 cx,
2484 );
2485 position.row += 1;
2486 }
2487
2488 if space_below
2489 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2490 {
2491 self.edit(
2492 [(position..position, "\n")],
2493 Some(AutoindentMode::EachLine),
2494 cx,
2495 );
2496 }
2497
2498 self.end_transaction(cx);
2499
2500 position
2501 }
2502
2503 /// Applies the given remote operations to the buffer.
2504 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2505 self.pending_autoindent.take();
2506 let was_dirty = self.is_dirty();
2507 let old_version = self.version.clone();
2508 let mut deferred_ops = Vec::new();
2509 let buffer_ops = ops
2510 .into_iter()
2511 .filter_map(|op| match op {
2512 Operation::Buffer(op) => Some(op),
2513 _ => {
2514 if self.can_apply_op(&op) {
2515 self.apply_op(op, cx);
2516 } else {
2517 deferred_ops.push(op);
2518 }
2519 None
2520 }
2521 })
2522 .collect::<Vec<_>>();
2523 for operation in buffer_ops.iter() {
2524 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2525 }
2526 self.text.apply_ops(buffer_ops);
2527 self.deferred_ops.insert(deferred_ops);
2528 self.flush_deferred_ops(cx);
2529 self.did_edit(&old_version, was_dirty, cx);
2530 // Notify independently of whether the buffer was edited as the operations could include a
2531 // selection update.
2532 cx.notify();
2533 }
2534
2535 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2536 let mut deferred_ops = Vec::new();
2537 for op in self.deferred_ops.drain().iter().cloned() {
2538 if self.can_apply_op(&op) {
2539 self.apply_op(op, cx);
2540 } else {
2541 deferred_ops.push(op);
2542 }
2543 }
2544 self.deferred_ops.insert(deferred_ops);
2545 }
2546
2547 pub fn has_deferred_ops(&self) -> bool {
2548 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2549 }
2550
2551 fn can_apply_op(&self, operation: &Operation) -> bool {
2552 match operation {
2553 Operation::Buffer(_) => {
2554 unreachable!("buffer operations should never be applied at this layer")
2555 }
2556 Operation::UpdateDiagnostics {
2557 diagnostics: diagnostic_set,
2558 ..
2559 } => diagnostic_set.iter().all(|diagnostic| {
2560 self.text.can_resolve(&diagnostic.range.start)
2561 && self.text.can_resolve(&diagnostic.range.end)
2562 }),
2563 Operation::UpdateSelections { selections, .. } => selections
2564 .iter()
2565 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2566 Operation::UpdateCompletionTriggers { .. } => true,
2567 }
2568 }
2569
2570 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2571 match operation {
2572 Operation::Buffer(_) => {
2573 unreachable!("buffer operations should never be applied at this layer")
2574 }
2575 Operation::UpdateDiagnostics {
2576 server_id,
2577 diagnostics: diagnostic_set,
2578 lamport_timestamp,
2579 } => {
2580 let snapshot = self.snapshot();
2581 self.apply_diagnostic_update(
2582 server_id,
2583 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2584 lamport_timestamp,
2585 cx,
2586 );
2587 }
2588 Operation::UpdateSelections {
2589 selections,
2590 lamport_timestamp,
2591 line_mode,
2592 cursor_shape,
2593 } => {
2594 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2595 && set.lamport_timestamp > lamport_timestamp
2596 {
2597 return;
2598 }
2599
2600 self.remote_selections.insert(
2601 lamport_timestamp.replica_id,
2602 SelectionSet {
2603 selections,
2604 lamport_timestamp,
2605 line_mode,
2606 cursor_shape,
2607 },
2608 );
2609 self.text.lamport_clock.observe(lamport_timestamp);
2610 self.non_text_state_update_count += 1;
2611 }
2612 Operation::UpdateCompletionTriggers {
2613 triggers,
2614 lamport_timestamp,
2615 server_id,
2616 } => {
2617 if triggers.is_empty() {
2618 self.completion_triggers_per_language_server
2619 .remove(&server_id);
2620 self.completion_triggers = self
2621 .completion_triggers_per_language_server
2622 .values()
2623 .flat_map(|triggers| triggers.iter().cloned())
2624 .collect();
2625 } else {
2626 self.completion_triggers_per_language_server
2627 .insert(server_id, triggers.iter().cloned().collect());
2628 self.completion_triggers.extend(triggers);
2629 }
2630 self.text.lamport_clock.observe(lamport_timestamp);
2631 }
2632 }
2633 }
2634
2635 fn apply_diagnostic_update(
2636 &mut self,
2637 server_id: LanguageServerId,
2638 diagnostics: DiagnosticSet,
2639 lamport_timestamp: clock::Lamport,
2640 cx: &mut Context<Self>,
2641 ) {
2642 if lamport_timestamp > self.diagnostics_timestamp {
2643 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2644 if diagnostics.is_empty() {
2645 if let Ok(ix) = ix {
2646 self.diagnostics.remove(ix);
2647 }
2648 } else {
2649 match ix {
2650 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2651 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2652 };
2653 }
2654 self.diagnostics_timestamp = lamport_timestamp;
2655 self.non_text_state_update_count += 1;
2656 self.text.lamport_clock.observe(lamport_timestamp);
2657 cx.notify();
2658 cx.emit(BufferEvent::DiagnosticsUpdated);
2659 }
2660 }
2661
2662 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2663 self.was_changed();
2664 cx.emit(BufferEvent::Operation {
2665 operation,
2666 is_local,
2667 });
2668 }
2669
2670 /// Removes the selections for a given peer.
2671 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2672 self.remote_selections.remove(&replica_id);
2673 cx.notify();
2674 }
2675
2676 /// Undoes the most recent transaction.
2677 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2678 let was_dirty = self.is_dirty();
2679 let old_version = self.version.clone();
2680
2681 if let Some((transaction_id, operation)) = self.text.undo() {
2682 self.send_operation(Operation::Buffer(operation), true, cx);
2683 self.did_edit(&old_version, was_dirty, cx);
2684 Some(transaction_id)
2685 } else {
2686 None
2687 }
2688 }
2689
2690 /// Manually undoes a specific transaction in the buffer's undo history.
2691 pub fn undo_transaction(
2692 &mut self,
2693 transaction_id: TransactionId,
2694 cx: &mut Context<Self>,
2695 ) -> bool {
2696 let was_dirty = self.is_dirty();
2697 let old_version = self.version.clone();
2698 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2699 self.send_operation(Operation::Buffer(operation), true, cx);
2700 self.did_edit(&old_version, was_dirty, cx);
2701 true
2702 } else {
2703 false
2704 }
2705 }
2706
2707 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2708 pub fn undo_to_transaction(
2709 &mut self,
2710 transaction_id: TransactionId,
2711 cx: &mut Context<Self>,
2712 ) -> bool {
2713 let was_dirty = self.is_dirty();
2714 let old_version = self.version.clone();
2715
2716 let operations = self.text.undo_to_transaction(transaction_id);
2717 let undone = !operations.is_empty();
2718 for operation in operations {
2719 self.send_operation(Operation::Buffer(operation), true, cx);
2720 }
2721 if undone {
2722 self.did_edit(&old_version, was_dirty, cx)
2723 }
2724 undone
2725 }
2726
2727 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2728 let was_dirty = self.is_dirty();
2729 let operation = self.text.undo_operations(counts);
2730 let old_version = self.version.clone();
2731 self.send_operation(Operation::Buffer(operation), true, cx);
2732 self.did_edit(&old_version, was_dirty, cx);
2733 }
2734
2735 /// Redoes the most recently undone transaction.
2736 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2737 let was_dirty = self.is_dirty();
2738 let old_version = self.version.clone();
2739
2740 if let Some((transaction_id, operation)) = self.text.redo() {
2741 self.send_operation(Operation::Buffer(operation), true, cx);
2742 self.did_edit(&old_version, was_dirty, cx);
2743 Some(transaction_id)
2744 } else {
2745 None
2746 }
2747 }
2748
2749 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2750 pub fn redo_to_transaction(
2751 &mut self,
2752 transaction_id: TransactionId,
2753 cx: &mut Context<Self>,
2754 ) -> bool {
2755 let was_dirty = self.is_dirty();
2756 let old_version = self.version.clone();
2757
2758 let operations = self.text.redo_to_transaction(transaction_id);
2759 let redone = !operations.is_empty();
2760 for operation in operations {
2761 self.send_operation(Operation::Buffer(operation), true, cx);
2762 }
2763 if redone {
2764 self.did_edit(&old_version, was_dirty, cx)
2765 }
2766 redone
2767 }
2768
2769 /// Override current completion triggers with the user-provided completion triggers.
2770 pub fn set_completion_triggers(
2771 &mut self,
2772 server_id: LanguageServerId,
2773 triggers: BTreeSet<String>,
2774 cx: &mut Context<Self>,
2775 ) {
2776 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2777 if triggers.is_empty() {
2778 self.completion_triggers_per_language_server
2779 .remove(&server_id);
2780 self.completion_triggers = self
2781 .completion_triggers_per_language_server
2782 .values()
2783 .flat_map(|triggers| triggers.iter().cloned())
2784 .collect();
2785 } else {
2786 self.completion_triggers_per_language_server
2787 .insert(server_id, triggers.clone());
2788 self.completion_triggers.extend(triggers.iter().cloned());
2789 }
2790 self.send_operation(
2791 Operation::UpdateCompletionTriggers {
2792 triggers: triggers.into_iter().collect(),
2793 lamport_timestamp: self.completion_triggers_timestamp,
2794 server_id,
2795 },
2796 true,
2797 cx,
2798 );
2799 cx.notify();
2800 }
2801
2802 /// Returns the set of strings that trigger a completion menu for this language.
2803 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2804 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2805 &self.completion_triggers
2806 }
2807
2808 /// Call this directly after performing edits to prevent the preview tab
2809 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2810 /// to return false until there are additional edits.
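///
/// # Example
///
/// A sketch (illustrative only; assumes a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// buffer.edit([(0..0, "// generated header\n")], None, cx);
/// buffer.refresh_preview();
/// assert!(buffer.preserve_preview());
/// ```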
2811 pub fn refresh_preview(&mut self) {
2812 self.preview_version = self.version.clone();
2813 }
2814
2815 /// Whether we should preserve the preview status of a tab containing this buffer.
2816 pub fn preserve_preview(&self) -> bool {
2817 !self.has_edits_since(&self.preview_version)
2818 }
2819}
2820
2821#[doc(hidden)]
2822#[cfg(any(test, feature = "test-support"))]
2823impl Buffer {
2824 pub fn edit_via_marked_text(
2825 &mut self,
2826 marked_string: &str,
2827 autoindent_mode: Option<AutoindentMode>,
2828 cx: &mut Context<Self>,
2829 ) {
2830 let edits = self.edits_for_marked_text(marked_string);
2831 self.edit(edits, autoindent_mode, cx);
2832 }
2833
2834 pub fn set_group_interval(&mut self, group_interval: Duration) {
2835 self.text.set_group_interval(group_interval);
2836 }
2837
2838 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2839 where
2840 T: rand::Rng,
2841 {
2842 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2843 let mut last_end = None;
2844 for _ in 0..old_range_count {
2845 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2846 break;
2847 }
2848
2849 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2850 let mut range = self.random_byte_range(new_start, rng);
2851 if rng.gen_bool(0.2) {
2852 mem::swap(&mut range.start, &mut range.end);
2853 }
2854 last_end = Some(range.end);
2855
2856 let new_text_len = rng.gen_range(0..10);
2857 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2858 new_text = new_text.to_uppercase();
2859
2860 edits.push((range, new_text));
2861 }
2862 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2863 self.edit(edits, None, cx);
2864 }
2865
2866 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2867 let was_dirty = self.is_dirty();
2868 let old_version = self.version.clone();
2869
2870 let ops = self.text.randomly_undo_redo(rng);
2871 if !ops.is_empty() {
2872 for op in ops {
2873 self.send_operation(Operation::Buffer(op), true, cx);
2874 self.did_edit(&old_version, was_dirty, cx);
2875 }
2876 }
2877 }
2878}
2879
2880impl EventEmitter<BufferEvent> for Buffer {}
2881
2882impl Deref for Buffer {
2883 type Target = TextBuffer;
2884
2885 fn deref(&self) -> &Self::Target {
2886 &self.text
2887 }
2888}
2889
2890impl BufferSnapshot {
2891 /// Returns [`IndentSize`] for a given line that respects user settings and
2892 /// language preferences.
2893 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2894 indent_size_for_line(self, row)
2895 }
2896
2897 /// Returns [`IndentSize`] for a given position that respects user settings
2898 /// and language preferences.
2899 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2900 let settings = language_settings(
2901 self.language_at(position).map(|l| l.name()),
2902 self.file(),
2903 cx,
2904 );
2905 if settings.hard_tabs {
2906 IndentSize::tab()
2907 } else {
2908 IndentSize::spaces(settings.tab_size.get())
2909 }
2910 }
2911
2912 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2913 /// is passed in as `single_indent_size`.
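///
/// # Example
///
/// A sketch (illustrative only; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let sizes = snapshot.suggested_indents(2u32..5, IndentSize::spaces(4));
/// for (row, size) in sizes {
///     println!("row {row}: indent by {} columns", size.len);
/// }
/// ```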
2914 pub fn suggested_indents(
2915 &self,
2916 rows: impl Iterator<Item = u32>,
2917 single_indent_size: IndentSize,
2918 ) -> BTreeMap<u32, IndentSize> {
2919 let mut result = BTreeMap::new();
2920
2921 for row_range in contiguous_ranges(rows, 10) {
2922 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2923 Some(suggestions) => suggestions,
2924 _ => break,
2925 };
2926
2927 for (row, suggestion) in row_range.zip(suggestions) {
2928 let indent_size = if let Some(suggestion) = suggestion {
2929 result
2930 .get(&suggestion.basis_row)
2931 .copied()
2932 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2933 .with_delta(suggestion.delta, single_indent_size)
2934 } else {
2935 self.indent_size_for_line(row)
2936 };
2937
2938 result.insert(row, indent_size);
2939 }
2940 }
2941
2942 result
2943 }
2944
2945 fn suggest_autoindents(
2946 &self,
2947 row_range: Range<u32>,
2948 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2949 let config = &self.language.as_ref()?.config;
2950 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2951
2952 #[derive(Debug, Clone)]
2953 struct StartPosition {
2954 start: Point,
2955 suffix: SharedString,
2956 }
2957
2958 // Find the suggested indentation ranges based on the syntax tree.
2959 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2960 let end = Point::new(row_range.end, 0);
2961 let range = (start..end).to_offset(&self.text);
2962 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2963 Some(&grammar.indents_config.as_ref()?.query)
2964 });
2965 let indent_configs = matches
2966 .grammars()
2967 .iter()
2968 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2969 .collect::<Vec<_>>();
2970
2971 let mut indent_ranges = Vec::<Range<Point>>::new();
2972 let mut start_positions = Vec::<StartPosition>::new();
2973 let mut outdent_positions = Vec::<Point>::new();
2974 while let Some(mat) = matches.peek() {
2975 let mut start: Option<Point> = None;
2976 let mut end: Option<Point> = None;
2977
2978 let config = indent_configs[mat.grammar_index];
2979 for capture in mat.captures {
2980 if capture.index == config.indent_capture_ix {
2981 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2982 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2983 } else if Some(capture.index) == config.start_capture_ix {
2984 start = Some(Point::from_ts_point(capture.node.end_position()));
2985 } else if Some(capture.index) == config.end_capture_ix {
2986 end = Some(Point::from_ts_point(capture.node.start_position()));
2987 } else if Some(capture.index) == config.outdent_capture_ix {
2988 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2989 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2990 start_positions.push(StartPosition {
2991 start: Point::from_ts_point(capture.node.start_position()),
2992 suffix: suffix.clone(),
2993 });
2994 }
2995 }
2996
2997 matches.advance();
2998 if let Some((start, end)) = start.zip(end) {
2999 if start.row == end.row {
3000 continue;
3001 }
3002 let range = start..end;
3003 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3004 Err(ix) => indent_ranges.insert(ix, range),
3005 Ok(ix) => {
3006 let prev_range = &mut indent_ranges[ix];
3007 prev_range.end = prev_range.end.max(range.end);
3008 }
3009 }
3010 }
3011 }
3012
3013 let mut error_ranges = Vec::<Range<Point>>::new();
3014 let mut matches = self
3015 .syntax
3016 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3017 while let Some(mat) = matches.peek() {
3018 let node = mat.captures[0].node;
3019 let start = Point::from_ts_point(node.start_position());
3020 let end = Point::from_ts_point(node.end_position());
3021 let range = start..end;
3022 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3023 Ok(ix) | Err(ix) => ix,
3024 };
3025 let mut end_ix = ix;
3026 while let Some(existing_range) = error_ranges.get(end_ix) {
3027 if existing_range.end < end {
3028 end_ix += 1;
3029 } else {
3030 break;
3031 }
3032 }
3033 error_ranges.splice(ix..end_ix, [range]);
3034 matches.advance();
3035 }
3036
3037 outdent_positions.sort();
3038 for outdent_position in outdent_positions {
3039 // Find the innermost indent range containing this outdent_position
3040 // and set its end to the outdent position.
3041 if let Some(range_to_truncate) = indent_ranges
3042 .iter_mut()
3043 .filter(|indent_range| indent_range.contains(&outdent_position))
3044 .next_back()
3045 {
3046 range_to_truncate.end = outdent_position;
3047 }
3048 }
3049
3050 start_positions.sort_by_key(|b| b.start);
3051
3052 // Find the suggested indentation increases and decreases based on regexes.
3053 let mut regex_outdent_map = HashMap::default();
3054 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3055 let mut start_positions_iter = start_positions.iter().peekable();
3056
3057 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3058 self.for_each_line(
3059 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3060 ..Point::new(row_range.end, 0),
3061 |row, line| {
3062 if config
3063 .decrease_indent_pattern
3064 .as_ref()
3065 .is_some_and(|regex| regex.is_match(line))
3066 {
3067 indent_change_rows.push((row, Ordering::Less));
3068 }
3069 if config
3070 .increase_indent_pattern
3071 .as_ref()
3072 .is_some_and(|regex| regex.is_match(line))
3073 {
3074 indent_change_rows.push((row + 1, Ordering::Greater));
3075 }
3076 while let Some(pos) = start_positions_iter.peek() {
3077 if pos.start.row < row {
3078 let pos = start_positions_iter.next().unwrap();
3079 last_seen_suffix
3080 .entry(pos.suffix.to_string())
3081 .or_default()
3082 .push(pos.start);
3083 } else {
3084 break;
3085 }
3086 }
3087 for rule in &config.decrease_indent_patterns {
3088 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3089 let row_start_column = self.indent_size_for_line(row).len;
3090 let basis_row = rule
3091 .valid_after
3092 .iter()
3093 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3094 .flatten()
3095 .filter(|start_point| start_point.column <= row_start_column)
3096 .max_by_key(|start_point| start_point.row);
3097 if let Some(outdent_to_row) = basis_row {
3098 regex_outdent_map.insert(row, outdent_to_row.row);
3099 }
3100 break;
3101 }
3102 }
3103 },
3104 );
3105
3106 let mut indent_changes = indent_change_rows.into_iter().peekable();
3107 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3108 prev_non_blank_row.unwrap_or(0)
3109 } else {
3110 row_range.start.saturating_sub(1)
3111 };
3112
3113 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3114 Some(row_range.map(move |row| {
3115 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3116
3117 let mut indent_from_prev_row = false;
3118 let mut outdent_from_prev_row = false;
3119 let mut outdent_to_row = u32::MAX;
3120 let mut from_regex = false;
3121
3122 while let Some((indent_row, delta)) = indent_changes.peek() {
3123 match indent_row.cmp(&row) {
3124 Ordering::Equal => match delta {
3125 Ordering::Less => {
3126 from_regex = true;
3127 outdent_from_prev_row = true
3128 }
3129 Ordering::Greater => {
3130 indent_from_prev_row = true;
3131 from_regex = true
3132 }
3133 _ => {}
3134 },
3135
3136 Ordering::Greater => break,
3137 Ordering::Less => {}
3138 }
3139
3140 indent_changes.next();
3141 }
3142
3143 for range in &indent_ranges {
3144 if range.start.row >= row {
3145 break;
3146 }
3147 if range.start.row == prev_row && range.end > row_start {
3148 indent_from_prev_row = true;
3149 }
3150 if range.end > prev_row_start && range.end <= row_start {
3151 outdent_to_row = outdent_to_row.min(range.start.row);
3152 }
3153 }
3154
3155 if let Some(basis_row) = regex_outdent_map.get(&row) {
3156 indent_from_prev_row = false;
3157 outdent_to_row = *basis_row;
3158 from_regex = true;
3159 }
3160
3161 let within_error = error_ranges
3162 .iter()
3163 .any(|e| e.start.row < row && e.end > row_start);
3164
3165 let suggestion = if outdent_to_row == prev_row
3166 || (outdent_from_prev_row && indent_from_prev_row)
3167 {
3168 Some(IndentSuggestion {
3169 basis_row: prev_row,
3170 delta: Ordering::Equal,
3171 within_error: within_error && !from_regex,
3172 })
3173 } else if indent_from_prev_row {
3174 Some(IndentSuggestion {
3175 basis_row: prev_row,
3176 delta: Ordering::Greater,
3177 within_error: within_error && !from_regex,
3178 })
3179 } else if outdent_to_row < prev_row {
3180 Some(IndentSuggestion {
3181 basis_row: outdent_to_row,
3182 delta: Ordering::Equal,
3183 within_error: within_error && !from_regex,
3184 })
3185 } else if outdent_from_prev_row {
3186 Some(IndentSuggestion {
3187 basis_row: prev_row,
3188 delta: Ordering::Less,
3189 within_error: within_error && !from_regex,
3190 })
3191 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3192 {
3193 Some(IndentSuggestion {
3194 basis_row: prev_row,
3195 delta: Ordering::Equal,
3196 within_error: within_error && !from_regex,
3197 })
3198 } else {
3199 None
3200 };
3201
3202 prev_row = row;
3203 prev_row_start = row_start;
3204 suggestion
3205 }))
3206 }
3207
3208 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3209 while row > 0 {
3210 row -= 1;
3211 if !self.is_line_blank(row) {
3212 return Some(row);
3213 }
3214 }
3215 None
3216 }
3217
3218 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3219 let captures = self.syntax.captures(range, &self.text, |grammar| {
3220 grammar.highlights_query.as_ref()
3221 });
3222 let highlight_maps = captures
3223 .grammars()
3224 .iter()
3225 .map(|grammar| grammar.highlight_map())
3226 .collect();
3227 (captures, highlight_maps)
3228 }
3229
3230 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3231 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3232 /// returned in chunks where each chunk has a single syntax highlighting style and
3233 /// diagnostic status.
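///
/// # Example
///
/// A sketch of walking the chunks in a range (illustrative only; assumes a
/// `snapshot: BufferSnapshot`):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries one highlight style and diagnostic status for its text.
///     print!("{}", chunk.text);
/// }
/// ```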
3234 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3235 let range = range.start.to_offset(self)..range.end.to_offset(self);
3236
3237 let mut syntax = None;
3238 if language_aware {
3239 syntax = Some(self.get_highlights(range.clone()));
3240 }
3241 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3242 let diagnostics = language_aware;
3243 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3244 }
3245
3246 pub fn highlighted_text_for_range<T: ToOffset>(
3247 &self,
3248 range: Range<T>,
3249 override_style: Option<HighlightStyle>,
3250 syntax_theme: &SyntaxTheme,
3251 ) -> HighlightedText {
3252 HighlightedText::from_buffer_range(
3253 range,
3254 &self.text,
3255 &self.syntax,
3256 override_style,
3257 syntax_theme,
3258 )
3259 }
3260
3261 /// Invokes the given callback for each line of text in the given range of the buffer.
3262 /// Uses a callback to avoid allocating a new string for each line.
3263 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3264 let mut line = String::new();
3265 let mut row = range.start.row;
3266 for chunk in self
3267 .as_rope()
3268 .chunks_in_range(range.to_offset(self))
3269 .chain(["\n"])
3270 {
3271 for (newline_ix, text) in chunk.split('\n').enumerate() {
3272 if newline_ix > 0 {
3273 callback(row, &line);
3274 row += 1;
3275 line.clear();
3276 }
3277 line.push_str(text);
3278 }
3279 }
3280 }
3281
3282 /// Iterates over every [`SyntaxLayer`] in the buffer.
3283 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3284 self.syntax
3285 .layers_for_range(0..self.len(), &self.text, true)
3286 }
3287
3288 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3289 let offset = position.to_offset(self);
3290 self.syntax
3291 .layers_for_range(offset..offset, &self.text, false)
3292 .filter(|l| l.node().end_byte() > offset)
3293 .last()
3294 }
3295
3296 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3297 &self,
3298 range: Range<D>,
3299 ) -> Option<SyntaxLayer<'_>> {
3300 let range = range.to_offset(self);
3301 self.syntax
3302 .layers_for_range(range, &self.text, false)
3303 .max_by(|a, b| {
3304 if a.depth != b.depth {
3305 a.depth.cmp(&b.depth)
3306 } else if a.offset.0 != b.offset.0 {
3307 a.offset.0.cmp(&b.offset.0)
3308 } else {
3309 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3310 }
3311 })
3312 }
3313
3314 /// Returns the main [`Language`].
3315 pub fn language(&self) -> Option<&Arc<Language>> {
3316 self.language.as_ref()
3317 }
3318
3319 /// Returns the [`Language`] at the given location.
3320 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3321 self.syntax_layer_at(position)
3322 .map(|info| info.language)
3323 .or(self.language.as_ref())
3324 }
3325
3326 /// Returns the settings for the language at the given location.
3327 pub fn settings_at<'a, D: ToOffset>(
3328 &'a self,
3329 position: D,
3330 cx: &'a App,
3331 ) -> Cow<'a, LanguageSettings> {
3332 language_settings(
3333 self.language_at(position).map(|l| l.name()),
3334 self.file.as_ref(),
3335 cx,
3336 )
3337 }
3338
3339 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3340 CharClassifier::new(self.language_scope_at(point))
3341 }
3342
3343 /// Returns the [`LanguageScope`] at the given location.
3344 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3345 let offset = position.to_offset(self);
3346 let mut scope = None;
3347 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3348
3349 // Use the layer that has the smallest node intersecting the given point.
3350 for layer in self
3351 .syntax
3352 .layers_for_range(offset..offset, &self.text, false)
3353 {
3354 let mut cursor = layer.node().walk();
3355
3356 let mut range = None;
3357 loop {
3358 let child_range = cursor.node().byte_range();
3359 if !child_range.contains(&offset) {
3360 break;
3361 }
3362
3363 range = Some(child_range);
3364 if cursor.goto_first_child_for_byte(offset).is_none() {
3365 break;
3366 }
3367 }
3368
3369 if let Some(range) = range
3370 && smallest_range_and_depth.as_ref().is_none_or(
3371 |(smallest_range, smallest_range_depth)| {
3372 if layer.depth > *smallest_range_depth {
3373 true
3374 } else if layer.depth == *smallest_range_depth {
3375 range.len() < smallest_range.len()
3376 } else {
3377 false
3378 }
3379 },
3380 )
3381 {
3382 smallest_range_and_depth = Some((range, layer.depth));
3383 scope = Some(LanguageScope {
3384 language: layer.language.clone(),
3385 override_id: layer.override_id(offset, &self.text),
3386 });
3387 }
3388 }
3389
3390 scope.or_else(|| {
3391 self.language.clone().map(|language| LanguageScope {
3392 language,
3393 override_id: None,
3394 })
3395 })
3396 }
3397
3398 /// Returns a tuple of the range and character kind of the word
3399 /// surrounding the given position.
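///
/// # Example
///
/// A sketch (illustrative only; assumes a snapshot whose text is `"hello world"`):
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(8, false);
/// assert_eq!(range, 6..11); // the byte range of "world"
/// assert!(kind.is_some());
/// ```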
3400 pub fn surrounding_word<T: ToOffset>(
3401 &self,
3402 start: T,
3403 for_completion: bool,
3404 ) -> (Range<usize>, Option<CharKind>) {
3405 let mut start = start.to_offset(self);
3406 let mut end = start;
3407 let mut next_chars = self.chars_at(start).take(128).peekable();
3408 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3409
3410 let classifier = self
3411 .char_classifier_at(start)
3412 .for_completion(for_completion);
3413 let word_kind = cmp::max(
3414 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3415 next_chars.peek().copied().map(|c| classifier.kind(c)),
3416 );
3417
3418 for ch in prev_chars {
3419 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3420 start -= ch.len_utf8();
3421 } else {
3422 break;
3423 }
3424 }
3425
3426 for ch in next_chars {
3427 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3428 end += ch.len_utf8();
3429 } else {
3430 break;
3431 }
3432 }
3433
3434 (start..end, word_kind)
3435 }
3436
3437 /// Returns the closest syntax node enclosing the given range.
3438 pub fn syntax_ancestor<'a, T: ToOffset>(
3439 &'a self,
3440 range: Range<T>,
3441 ) -> Option<tree_sitter::Node<'a>> {
3442 let range = range.start.to_offset(self)..range.end.to_offset(self);
3443 let mut result: Option<tree_sitter::Node<'a>> = None;
3444 'outer: for layer in self
3445 .syntax
3446 .layers_for_range(range.clone(), &self.text, true)
3447 {
3448 let mut cursor = layer.node().walk();
3449
3450 // Descend to the first leaf that touches the start of the range.
3451 //
3452 // If the range is non-empty and the current node ends exactly at the start,
3453 // move to the next sibling to find a node that extends beyond the start.
3454 //
3455 // If the range is empty and the current node starts after the range position,
3456 // move to the previous sibling to find the node that contains the position.
3457 while cursor.goto_first_child_for_byte(range.start).is_some() {
3458 if !range.is_empty() && cursor.node().end_byte() == range.start {
3459 cursor.goto_next_sibling();
3460 }
3461 if range.is_empty() && cursor.node().start_byte() > range.start {
3462 cursor.goto_previous_sibling();
3463 }
3464 }
3465
3466 // Ascend to the smallest ancestor that strictly contains the range.
3467 loop {
3468 let node_range = cursor.node().byte_range();
3469 if node_range.start <= range.start
3470 && node_range.end >= range.end
3471 && node_range.len() > range.len()
3472 {
3473 break;
3474 }
3475 if !cursor.goto_parent() {
3476 continue 'outer;
3477 }
3478 }
3479
3480 let left_node = cursor.node();
3481 let mut layer_result = left_node;
3482
3483 // For an empty range, try to find another node immediately to the right of the range.
3484 if left_node.end_byte() == range.start {
3485 let mut right_node = None;
3486 while !cursor.goto_next_sibling() {
3487 if !cursor.goto_parent() {
3488 break;
3489 }
3490 }
3491
3492 while cursor.node().start_byte() == range.start {
3493 right_node = Some(cursor.node());
3494 if !cursor.goto_first_child() {
3495 break;
3496 }
3497 }
3498
3499 // If there is a candidate node on both sides of the (empty) range, then
3500 // decide between the two by favoring a named node over an anonymous token.
3501 // If both nodes are the same in that regard, favor the right one.
3502 if let Some(right_node) = right_node
3503 && (right_node.is_named() || !left_node.is_named())
3504 {
3505 layer_result = right_node;
3506 }
3507 }
3508
3509 if let Some(previous_result) = &result
3510 && previous_result.byte_range().len() < layer_result.byte_range().len()
3511 {
3512 continue;
3513 }
3514 result = Some(layer_result);
3515 }
3516
3517 result
3518 }
3519
    /// Returns the root syntax node within the row containing the given position.
3521 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3522 let start_offset = position.to_offset(self);
3523
3524 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3525
3526 let layer = self
3527 .syntax
3528 .layers_for_range(start_offset..start_offset, &self.text, true)
3529 .next()?;
3530
3531 let mut cursor = layer.node().walk();
3532
3533 // Descend to the first leaf that touches the start of the range.
3534 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3535 if cursor.node().end_byte() == start_offset {
3536 cursor.goto_next_sibling();
3537 }
3538 }
3539
3540 // Ascend to the root node within the same row.
3541 while cursor.goto_parent() {
3542 if cursor.node().start_position().row != row {
3543 break;
3544 }
3545 }
3546
3547 Some(cursor.node())
3548 }
3549
3550 /// Returns the outline for the buffer.
3551 ///
3552 /// This method allows passing an optional [`SyntaxTheme`] to
3553 /// syntax-highlight the returned symbols.
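    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`, passing `None` skips highlighting, and the `items`
    /// field is assumed to be the outline's public list of [`OutlineItem`]s):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```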
3554 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3555 self.outline_items_containing(0..self.len(), true, theme)
3556 .map(Outline::new)
3557 }
3558
3559 /// Returns all the symbols that contain the given position.
3560 ///
3561 /// This method allows passing an optional [`SyntaxTheme`] to
3562 /// syntax-highlight the returned symbols.
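    ///
    /// A minimal usage sketch (illustrative only; `snapshot` and `cursor_offset`
    /// are assumed to exist in the caller's scope):
    ///
    /// ```ignore
    /// // Print the chain of enclosing symbols (module, type, function, ...) at the cursor.
    /// if let Some(symbols) = snapshot.symbols_containing(cursor_offset, None) {
    ///     for symbol in symbols {
    ///         println!("{}", symbol.text);
    ///     }
    /// }
    /// ```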
3563 pub fn symbols_containing<T: ToOffset>(
3564 &self,
3565 position: T,
3566 theme: Option<&SyntaxTheme>,
3567 ) -> Option<Vec<OutlineItem<Anchor>>> {
3568 let position = position.to_offset(self);
3569 let mut items = self.outline_items_containing(
3570 position.saturating_sub(1)..self.len().min(position + 1),
3571 false,
3572 theme,
3573 )?;
3574 let mut prev_depth = None;
3575 items.retain(|item| {
3576 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3577 prev_depth = Some(item.depth);
3578 result
3579 });
3580 Some(items)
3581 }
3582
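    /// Returns the point range of the first outline item that intersects the given range, if any.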
3583 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3584 let range = range.to_offset(self);
3585 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3586 grammar.outline_config.as_ref().map(|c| &c.query)
3587 });
3588 let configs = matches
3589 .grammars()
3590 .iter()
3591 .map(|g| g.outline_config.as_ref().unwrap())
3592 .collect::<Vec<_>>();
3593
3594 while let Some(mat) = matches.peek() {
3595 let config = &configs[mat.grammar_index];
3596 let containing_item_node = maybe!({
3597 let item_node = mat.captures.iter().find_map(|cap| {
3598 if cap.index == config.item_capture_ix {
3599 Some(cap.node)
3600 } else {
3601 None
3602 }
3603 })?;
3604
3605 let item_byte_range = item_node.byte_range();
3606 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3607 None
3608 } else {
3609 Some(item_node)
3610 }
3611 });
3612
3613 if let Some(item_node) = containing_item_node {
3614 return Some(
3615 Point::from_ts_point(item_node.start_position())
3616 ..Point::from_ts_point(item_node.end_position()),
3617 );
3618 }
3619
3620 matches.advance();
3621 }
3622 None
3623 }
3624
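    /// Returns all outline items whose ranges intersect the given range, assigning
    /// each item a depth based on containment.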
3625 pub fn outline_items_containing<T: ToOffset>(
3626 &self,
3627 range: Range<T>,
3628 include_extra_context: bool,
3629 theme: Option<&SyntaxTheme>,
3630 ) -> Option<Vec<OutlineItem<Anchor>>> {
3631 let range = range.to_offset(self);
3632 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3633 grammar.outline_config.as_ref().map(|c| &c.query)
3634 });
3635 let configs = matches
3636 .grammars()
3637 .iter()
3638 .map(|g| g.outline_config.as_ref().unwrap())
3639 .collect::<Vec<_>>();
3640
3641 let mut items = Vec::new();
3642 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3643 while let Some(mat) = matches.peek() {
3644 let config = &configs[mat.grammar_index];
3645 if let Some(item) =
3646 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3647 {
3648 items.push(item);
3649 } else if let Some(capture) = mat
3650 .captures
3651 .iter()
3652 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3653 {
3654 let capture_range = capture.node.start_position()..capture.node.end_position();
3655 let mut capture_row_range =
3656 capture_range.start.row as u32..capture_range.end.row as u32;
3657 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3658 {
3659 capture_row_range.end -= 1;
3660 }
3661 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3662 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3663 last_row_range.end = capture_row_range.end;
3664 } else {
3665 annotation_row_ranges.push(capture_row_range);
3666 }
3667 } else {
3668 annotation_row_ranges.push(capture_row_range);
3669 }
3670 }
3671 matches.advance();
3672 }
3673
3674 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3675
3676 // Assign depths based on containment relationships and convert to anchors.
3677 let mut item_ends_stack = Vec::<Point>::new();
3678 let mut anchor_items = Vec::new();
3679 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3680 for item in items {
3681 while let Some(last_end) = item_ends_stack.last().copied() {
3682 if last_end < item.range.end {
3683 item_ends_stack.pop();
3684 } else {
3685 break;
3686 }
3687 }
3688
3689 let mut annotation_row_range = None;
3690 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3691 let row_preceding_item = item.range.start.row.saturating_sub(1);
3692 if next_annotation_row_range.end < row_preceding_item {
3693 annotation_row_ranges.next();
3694 } else {
3695 if next_annotation_row_range.end == row_preceding_item {
3696 annotation_row_range = Some(next_annotation_row_range.clone());
3697 annotation_row_ranges.next();
3698 }
3699 break;
3700 }
3701 }
3702
3703 anchor_items.push(OutlineItem {
3704 depth: item_ends_stack.len(),
3705 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3706 text: item.text,
3707 highlight_ranges: item.highlight_ranges,
3708 name_ranges: item.name_ranges,
3709 body_range: item.body_range.map(|body_range| {
3710 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3711 }),
3712 annotation_range: annotation_row_range.map(|annotation_range| {
3713 self.anchor_after(Point::new(annotation_range.start, 0))
3714 ..self.anchor_before(Point::new(
3715 annotation_range.end,
3716 self.line_len(annotation_range.end),
3717 ))
3718 }),
3719 });
3720 item_ends_stack.push(item.range.end);
3721 }
3722
3723 Some(anchor_items)
3724 }
3725
3726 fn next_outline_item(
3727 &self,
3728 config: &OutlineConfig,
3729 mat: &SyntaxMapMatch,
3730 range: &Range<usize>,
3731 include_extra_context: bool,
3732 theme: Option<&SyntaxTheme>,
3733 ) -> Option<OutlineItem<Point>> {
3734 let item_node = mat.captures.iter().find_map(|cap| {
3735 if cap.index == config.item_capture_ix {
3736 Some(cap.node)
3737 } else {
3738 None
3739 }
3740 })?;
3741
3742 let item_byte_range = item_node.byte_range();
3743 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3744 return None;
3745 }
3746 let item_point_range = Point::from_ts_point(item_node.start_position())
3747 ..Point::from_ts_point(item_node.end_position());
3748
3749 let mut open_point = None;
3750 let mut close_point = None;
3751 let mut buffer_ranges = Vec::new();
3752 for capture in mat.captures {
3753 let node_is_name;
3754 if capture.index == config.name_capture_ix {
3755 node_is_name = true;
3756 } else if Some(capture.index) == config.context_capture_ix
3757 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3758 {
3759 node_is_name = false;
3760 } else {
3761 if Some(capture.index) == config.open_capture_ix {
3762 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3763 } else if Some(capture.index) == config.close_capture_ix {
3764 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3765 }
3766
3767 continue;
3768 }
3769
3770 let mut range = capture.node.start_byte()..capture.node.end_byte();
3771 let start = capture.node.start_position();
3772 if capture.node.end_position().row > start.row {
3773 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3774 }
3775
3776 if !range.is_empty() {
3777 buffer_ranges.push((range, node_is_name));
3778 }
3779 }
3780 if buffer_ranges.is_empty() {
3781 return None;
3782 }
3783 let mut text = String::new();
3784 let mut highlight_ranges = Vec::new();
3785 let mut name_ranges = Vec::new();
3786 let mut chunks = self.chunks(
3787 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3788 true,
3789 );
3790 let mut last_buffer_range_end = 0;
3791
3792 for (buffer_range, is_name) in buffer_ranges {
3793 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3794 if space_added {
3795 text.push(' ');
3796 }
3797 let before_append_len = text.len();
3798 let mut offset = buffer_range.start;
3799 chunks.seek(buffer_range.clone());
3800 for mut chunk in chunks.by_ref() {
3801 if chunk.text.len() > buffer_range.end - offset {
3802 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3803 offset = buffer_range.end;
3804 } else {
3805 offset += chunk.text.len();
3806 }
3807 let style = chunk
3808 .syntax_highlight_id
3809 .zip(theme)
3810 .and_then(|(highlight, theme)| highlight.style(theme));
3811 if let Some(style) = style {
3812 let start = text.len();
3813 let end = start + chunk.text.len();
3814 highlight_ranges.push((start..end, style));
3815 }
3816 text.push_str(chunk.text);
3817 if offset >= buffer_range.end {
3818 break;
3819 }
3820 }
3821 if is_name {
3822 let after_append_len = text.len();
3823 let start = if space_added && !name_ranges.is_empty() {
3824 before_append_len - 1
3825 } else {
3826 before_append_len
3827 };
3828 name_ranges.push(start..after_append_len);
3829 }
3830 last_buffer_range_end = buffer_range.end;
3831 }
3832
3833 Some(OutlineItem {
3834 depth: 0, // We'll calculate the depth later
3835 range: item_point_range,
3836 text,
3837 highlight_ranges,
3838 name_ranges,
3839 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3840 annotation_range: None,
3841 })
3842 }
3843
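    /// Returns the ranges of function bodies within `within`, for use when folding them.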
3844 pub fn function_body_fold_ranges<T: ToOffset>(
3845 &self,
3846 within: Range<T>,
3847 ) -> impl Iterator<Item = Range<usize>> + '_ {
3848 self.text_object_ranges(within, TreeSitterOptions::default())
3849 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3850 }
3851
3852 /// For each grammar in the language, runs the provided
3853 /// [`tree_sitter::Query`] against the given range.
3854 pub fn matches(
3855 &self,
3856 range: Range<usize>,
3857 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3858 ) -> SyntaxMapMatches<'_> {
3859 self.syntax.matches(range, self, query)
3860 }
3861
3862 pub fn all_bracket_ranges(
3863 &self,
3864 range: Range<usize>,
3865 ) -> impl Iterator<Item = BracketMatch> + '_ {
3866 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3867 grammar.brackets_config.as_ref().map(|c| &c.query)
3868 });
3869 let configs = matches
3870 .grammars()
3871 .iter()
3872 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3873 .collect::<Vec<_>>();
3874
3875 iter::from_fn(move || {
3876 while let Some(mat) = matches.peek() {
3877 let mut open = None;
3878 let mut close = None;
3879 let config = &configs[mat.grammar_index];
3880 let pattern = &config.patterns[mat.pattern_index];
3881 for capture in mat.captures {
3882 if capture.index == config.open_capture_ix {
3883 open = Some(capture.node.byte_range());
3884 } else if capture.index == config.close_capture_ix {
3885 close = Some(capture.node.byte_range());
3886 }
3887 }
3888
3889 matches.advance();
3890
3891 let Some((open_range, close_range)) = open.zip(close) else {
3892 continue;
3893 };
3894
3895 let bracket_range = open_range.start..=close_range.end;
3896 if !bracket_range.overlaps(&range) {
3897 continue;
3898 }
3899
3900 return Some(BracketMatch {
3901 open_range,
3902 close_range,
3903 newline_only: pattern.newline_only,
3904 });
3905 }
3906 None
3907 })
3908 }
3909
    /// Returns bracket range pairs overlapping or adjacent to `range`.
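    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Collect every non-newline bracket pair that touches the offset range 10..20.
    /// for pair in snapshot.bracket_ranges(10..20) {
    ///     println!("open: {:?}, close: {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```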
3911 pub fn bracket_ranges<T: ToOffset>(
3912 &self,
3913 range: Range<T>,
3914 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side so that bracket pairs
        // adjacent to the given range are also found.
3916 let range = range.start.to_offset(self).saturating_sub(1)
3917 ..self.len().min(range.end.to_offset(self) + 1);
3918 self.all_bracket_ranges(range)
3919 .filter(|pair| !pair.newline_only)
3920 }
3921
3922 pub fn debug_variables_query<T: ToOffset>(
3923 &self,
3924 range: Range<T>,
3925 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3926 let range = range.start.to_offset(self).saturating_sub(1)
3927 ..self.len().min(range.end.to_offset(self) + 1);
3928
3929 let mut matches = self.syntax.matches_with_options(
3930 range.clone(),
3931 &self.text,
3932 TreeSitterOptions::default(),
3933 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3934 );
3935
3936 let configs = matches
3937 .grammars()
3938 .iter()
3939 .map(|grammar| grammar.debug_variables_config.as_ref())
3940 .collect::<Vec<_>>();
3941
3942 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3943
3944 iter::from_fn(move || {
3945 loop {
3946 while let Some(capture) = captures.pop() {
3947 if capture.0.overlaps(&range) {
3948 return Some(capture);
3949 }
3950 }
3951
3952 let mat = matches.peek()?;
3953
3954 let Some(config) = configs[mat.grammar_index].as_ref() else {
3955 matches.advance();
3956 continue;
3957 };
3958
3959 for capture in mat.captures {
3960 let Some(ix) = config
3961 .objects_by_capture_ix
3962 .binary_search_by_key(&capture.index, |e| e.0)
3963 .ok()
3964 else {
3965 continue;
3966 };
3967 let text_object = config.objects_by_capture_ix[ix].1;
3968 let byte_range = capture.node.byte_range();
3969
3970 let mut found = false;
3971 for (range, existing) in captures.iter_mut() {
3972 if existing == &text_object {
3973 range.start = range.start.min(byte_range.start);
3974 range.end = range.end.max(byte_range.end);
3975 found = true;
3976 break;
3977 }
3978 }
3979
3980 if !found {
3981 captures.push((byte_range, text_object));
3982 }
3983 }
3984
3985 matches.advance();
3986 }
3987 })
3988 }
3989
3990 pub fn text_object_ranges<T: ToOffset>(
3991 &self,
3992 range: Range<T>,
3993 options: TreeSitterOptions,
3994 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3995 let range = range.start.to_offset(self).saturating_sub(1)
3996 ..self.len().min(range.end.to_offset(self) + 1);
3997
3998 let mut matches =
3999 self.syntax
4000 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4001 grammar.text_object_config.as_ref().map(|c| &c.query)
4002 });
4003
4004 let configs = matches
4005 .grammars()
4006 .iter()
4007 .map(|grammar| grammar.text_object_config.as_ref())
4008 .collect::<Vec<_>>();
4009
4010 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4011
4012 iter::from_fn(move || {
4013 loop {
4014 while let Some(capture) = captures.pop() {
4015 if capture.0.overlaps(&range) {
4016 return Some(capture);
4017 }
4018 }
4019
4020 let mat = matches.peek()?;
4021
4022 let Some(config) = configs[mat.grammar_index].as_ref() else {
4023 matches.advance();
4024 continue;
4025 };
4026
4027 for capture in mat.captures {
4028 let Some(ix) = config
4029 .text_objects_by_capture_ix
4030 .binary_search_by_key(&capture.index, |e| e.0)
4031 .ok()
4032 else {
4033 continue;
4034 };
4035 let text_object = config.text_objects_by_capture_ix[ix].1;
4036 let byte_range = capture.node.byte_range();
4037
4038 let mut found = false;
4039 for (range, existing) in captures.iter_mut() {
4040 if existing == &text_object {
4041 range.start = range.start.min(byte_range.start);
4042 range.end = range.end.max(byte_range.end);
4043 found = true;
4044 break;
4045 }
4046 }
4047
4048 if !found {
4049 captures.push((byte_range, text_object));
4050 }
4051 }
4052
4053 matches.advance();
4054 }
4055 })
4056 }
4057
    /// Returns the enclosing bracket ranges that contain the given range.
4059 pub fn enclosing_bracket_ranges<T: ToOffset>(
4060 &self,
4061 range: Range<T>,
4062 ) -> impl Iterator<Item = BracketMatch> + '_ {
4063 let range = range.start.to_offset(self)..range.end.to_offset(self);
4064
4065 self.bracket_ranges(range.clone()).filter(move |pair| {
4066 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4067 })
4068 }
4069
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
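    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot` and the filter closure is a hypothetical example):
    ///
    /// ```ignore
    /// // Find the tightest bracket pair around an empty range at offset 30,
    /// // ignoring pairs whose open bracket spans no text.
    /// let filter = |open: Range<usize>, _close: Range<usize>| !open.is_empty();
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(30..30, Some(&filter));
    /// ```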
4073 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4074 &self,
4075 range: Range<T>,
4076 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4077 ) -> Option<(Range<usize>, Range<usize>)> {
4078 let range = range.start.to_offset(self)..range.end.to_offset(self);
4079
4080 // Get the ranges of the innermost pair of brackets.
4081 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4082
4083 for pair in self.enclosing_bracket_ranges(range) {
4084 if let Some(range_filter) = range_filter
4085 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4086 {
4087 continue;
4088 }
4089
4090 let len = pair.close_range.end - pair.open_range.start;
4091
4092 if let Some((existing_open, existing_close)) = &result {
4093 let existing_len = existing_close.end - existing_open.start;
4094 if len > existing_len {
4095 continue;
4096 }
4097 }
4098
4099 result = Some((pair.open_range, pair.close_range));
4100 }
4101
4102 result
4103 }
4104
    /// Returns the offset ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
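    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot` whose language defines a redaction query):
    ///
    /// ```ignore
    /// // Gather the byte ranges that should be hidden, e.g. secret values in an .env file.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```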
4108 pub fn redacted_ranges<T: ToOffset>(
4109 &self,
4110 range: Range<T>,
4111 ) -> impl Iterator<Item = Range<usize>> + '_ {
4112 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4113 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4114 grammar
4115 .redactions_config
4116 .as_ref()
4117 .map(|config| &config.query)
4118 });
4119
4120 let configs = syntax_matches
4121 .grammars()
4122 .iter()
4123 .map(|grammar| grammar.redactions_config.as_ref())
4124 .collect::<Vec<_>>();
4125
4126 iter::from_fn(move || {
4127 let redacted_range = syntax_matches
4128 .peek()
4129 .and_then(|mat| {
4130 configs[mat.grammar_index].and_then(|config| {
4131 mat.captures
4132 .iter()
4133 .find(|capture| capture.index == config.redaction_capture_ix)
4134 })
4135 })
4136 .map(|mat| mat.node.byte_range());
4137 syntax_matches.advance();
4138 redacted_range
4139 })
4140 }
4141
4142 pub fn injections_intersecting_range<T: ToOffset>(
4143 &self,
4144 range: Range<T>,
4145 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4146 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4147
4148 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4149 grammar
4150 .injection_config
4151 .as_ref()
4152 .map(|config| &config.query)
4153 });
4154
4155 let configs = syntax_matches
4156 .grammars()
4157 .iter()
4158 .map(|grammar| grammar.injection_config.as_ref())
4159 .collect::<Vec<_>>();
4160
4161 iter::from_fn(move || {
4162 let ranges = syntax_matches.peek().and_then(|mat| {
4163 let config = &configs[mat.grammar_index]?;
4164 let content_capture_range = mat.captures.iter().find_map(|capture| {
4165 if capture.index == config.content_capture_ix {
4166 Some(capture.node.byte_range())
4167 } else {
4168 None
4169 }
4170 })?;
4171 let language = self.language_at(content_capture_range.start)?;
4172 Some((content_capture_range, language))
4173 });
4174 syntax_matches.advance();
4175 ranges
4176 })
4177 }
4178
4179 pub fn runnable_ranges(
4180 &self,
4181 offset_range: Range<usize>,
4182 ) -> impl Iterator<Item = RunnableRange> + '_ {
4183 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4184 grammar.runnable_config.as_ref().map(|config| &config.query)
4185 });
4186
4187 let test_configs = syntax_matches
4188 .grammars()
4189 .iter()
4190 .map(|grammar| grammar.runnable_config.as_ref())
4191 .collect::<Vec<_>>();
4192
4193 iter::from_fn(move || {
4194 loop {
4195 let mat = syntax_matches.peek()?;
4196
4197 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4198 let mut run_range = None;
4199 let full_range = mat.captures.iter().fold(
4200 Range {
4201 start: usize::MAX,
4202 end: 0,
4203 },
4204 |mut acc, next| {
4205 let byte_range = next.node.byte_range();
4206 if acc.start > byte_range.start {
4207 acc.start = byte_range.start;
4208 }
4209 if acc.end < byte_range.end {
4210 acc.end = byte_range.end;
4211 }
4212 acc
4213 },
4214 );
4215 if full_range.start > full_range.end {
4216 // We did not find a full spanning range of this match.
4217 return None;
4218 }
4219 let extra_captures: SmallVec<[_; 1]> =
4220 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4221 test_configs
4222 .extra_captures
4223 .get(capture.index as usize)
4224 .cloned()
4225 .and_then(|tag_name| match tag_name {
4226 RunnableCapture::Named(name) => {
4227 Some((capture.node.byte_range(), name))
4228 }
4229 RunnableCapture::Run => {
4230 let _ = run_range.insert(capture.node.byte_range());
4231 None
4232 }
4233 })
4234 }));
4235 let run_range = run_range?;
4236 let tags = test_configs
4237 .query
4238 .property_settings(mat.pattern_index)
4239 .iter()
4240 .filter_map(|property| {
4241 if *property.key == *"tag" {
4242 property
4243 .value
4244 .as_ref()
4245 .map(|value| RunnableTag(value.to_string().into()))
4246 } else {
4247 None
4248 }
4249 })
4250 .collect();
4251 let extra_captures = extra_captures
4252 .into_iter()
4253 .map(|(range, name)| {
4254 (
4255 name.to_string(),
4256 self.text_for_range(range).collect::<String>(),
4257 )
4258 })
4259 .collect();
4260 // All tags should have the same range.
4261 Some(RunnableRange {
4262 run_range,
4263 full_range,
4264 runnable: Runnable {
4265 tags,
4266 language: mat.language,
4267 buffer: self.remote_id(),
4268 },
4269 extra_captures,
4270 buffer_id: self.remote_id(),
4271 })
4272 });
4273
4274 syntax_matches.advance();
4275 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match
                    // lacked a run marker, we don't want to end this iterator early, so we loop
                    // around and try the next match instead.
4278 return test_range;
4279 }
4280 }
4281 })
4282 }
4283
4284 /// Returns selections for remote peers intersecting the given range.
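    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`, and `Anchor::MIN..Anchor::MAX` is assumed to cover
    /// the whole buffer):
    ///
    /// ```ignore
    /// // Count the selections of every remote collaborator across the whole buffer.
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     println!("{:?} has {} selections", replica_id, selections.count());
    /// }
    /// ```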
4285 #[allow(clippy::type_complexity)]
4286 pub fn selections_in_range(
4287 &self,
4288 range: Range<Anchor>,
4289 include_local: bool,
4290 ) -> impl Iterator<
4291 Item = (
4292 ReplicaId,
4293 bool,
4294 CursorShape,
4295 impl Iterator<Item = &Selection<Anchor>> + '_,
4296 ),
4297 > + '_ {
4298 self.remote_selections
4299 .iter()
4300 .filter(move |(replica_id, set)| {
4301 (include_local || **replica_id != self.text.replica_id())
4302 && !set.selections.is_empty()
4303 })
4304 .map(move |(replica_id, set)| {
4305 let start_ix = match set.selections.binary_search_by(|probe| {
4306 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4307 }) {
4308 Ok(ix) | Err(ix) => ix,
4309 };
4310 let end_ix = match set.selections.binary_search_by(|probe| {
4311 probe.start.cmp(&range.end, self).then(Ordering::Less)
4312 }) {
4313 Ok(ix) | Err(ix) => ix,
4314 };
4315
4316 (
4317 *replica_id,
4318 set.line_mode,
4319 set.cursor_shape,
4320 set.selections[start_ix..end_ix].iter(),
4321 )
4322 })
4323 }
4324
    /// Returns whether the buffer contains any diagnostics.
4326 pub fn has_diagnostics(&self) -> bool {
4327 !self.diagnostics.is_empty()
4328 }
4329
4330 /// Returns all the diagnostics intersecting the given range.
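    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Walk the diagnostics in the first 100 bytes, resolving ranges to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..100, false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```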
4331 pub fn diagnostics_in_range<'a, T, O>(
4332 &'a self,
4333 search_range: Range<T>,
4334 reversed: bool,
4335 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4336 where
4337 T: 'a + Clone + ToOffset,
4338 O: 'a + FromAnchor,
4339 {
4340 let mut iterators: Vec<_> = self
4341 .diagnostics
4342 .iter()
4343 .map(|(_, collection)| {
4344 collection
4345 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4346 .peekable()
4347 })
4348 .collect();
4349
4350 std::iter::from_fn(move || {
4351 let (next_ix, _) = iterators
4352 .iter_mut()
4353 .enumerate()
4354 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4355 .min_by(|(_, a), (_, b)| {
4356 let cmp = a
4357 .range
4358 .start
4359 .cmp(&b.range.start, self)
4360 // when range is equal, sort by diagnostic severity
4361 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4362 // and stabilize order with group_id
4363 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4364 if reversed { cmp.reverse() } else { cmp }
4365 })?;
4366 iterators[next_ix]
4367 .next()
4368 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4369 diagnostic,
4370 range: FromAnchor::from_anchor(&range.start, self)
4371 ..FromAnchor::from_anchor(&range.end, self),
4372 })
4373 })
4374 }
4375
4376 /// Returns all the diagnostic groups associated with the given
4377 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
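    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Print the primary message of every diagnostic group, across all language servers.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```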
4379 pub fn diagnostic_groups(
4380 &self,
4381 language_server_id: Option<LanguageServerId>,
4382 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4383 let mut groups = Vec::new();
4384
4385 if let Some(language_server_id) = language_server_id {
4386 if let Ok(ix) = self
4387 .diagnostics
4388 .binary_search_by_key(&language_server_id, |e| e.0)
4389 {
4390 self.diagnostics[ix]
4391 .1
4392 .groups(language_server_id, &mut groups, self);
4393 }
4394 } else {
4395 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4396 diagnostics.groups(*language_server_id, &mut groups, self);
4397 }
4398 }
4399
4400 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4401 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4402 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4403 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4404 });
4405
4406 groups
4407 }
4408
4409 /// Returns an iterator over the diagnostics for the given group.
4410 pub fn diagnostic_group<O>(
4411 &self,
4412 group_id: usize,
4413 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4414 where
4415 O: FromAnchor + 'static,
4416 {
4417 self.diagnostics
4418 .iter()
4419 .flat_map(move |(_, set)| set.group(group_id, self))
4420 }
4421
4422 /// An integer version number that accounts for all updates besides
4423 /// the buffer's text itself (which is versioned via a version vector).
4424 pub fn non_text_state_update_count(&self) -> usize {
4425 self.non_text_state_update_count
4426 }
4427
4428 /// An integer version that changes when the buffer's syntax changes.
4429 pub fn syntax_update_count(&self) -> usize {
4430 self.syntax.update_count()
4431 }
4432
    /// Returns a snapshot of the underlying file.
4434 pub fn file(&self) -> Option<&Arc<dyn File>> {
4435 self.file.as_ref()
4436 }
4437
4438 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4439 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4440 if let Some(file) = self.file() {
4441 if file.path().file_name().is_none() || include_root {
4442 Some(file.full_path(cx))
4443 } else {
4444 Some(file.path().to_path_buf())
4445 }
4446 } else {
4447 None
4448 }
4449 }
4450
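    /// Returns the words in the given range, keyed by their text and mapped to the
    /// anchor range they occupy, optionally filtered by a fuzzy query.
    ///
    /// A minimal usage sketch (illustrative only; `snapshot` is an assumed
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```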
4451 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4452 let query_str = query.fuzzy_contents;
4453 if query_str.is_some_and(|query| query.is_empty()) {
4454 return BTreeMap::default();
4455 }
4456
4457 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4458 language,
4459 override_id: None,
4460 }));
4461
4462 let mut query_ix = 0;
4463 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4464 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4465
4466 let mut words = BTreeMap::default();
4467 let mut current_word_start_ix = None;
4468 let mut chunk_ix = query.range.start;
4469 for chunk in self.chunks(query.range, false) {
4470 for (i, c) in chunk.text.char_indices() {
4471 let ix = chunk_ix + i;
4472 if classifier.is_word(c) {
4473 if current_word_start_ix.is_none() {
4474 current_word_start_ix = Some(ix);
4475 }
4476
4477 if let Some(query_chars) = &query_chars
4478 && query_ix < query_len
4479 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4480 {
4481 query_ix += 1;
4482 }
4483 continue;
4484 } else if let Some(word_start) = current_word_start_ix.take()
4485 && query_ix == query_len
4486 {
4487 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4488 let mut word_text = self.text_for_range(word_start..ix).peekable();
4489 let first_char = word_text
4490 .peek()
4491 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
4493 if !query.skip_digits
4494 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4495 {
4496 words.insert(word_text.collect(), word_range);
4497 }
4498 }
4499 query_ix = 0;
4500 }
4501 chunk_ix += chunk.text.len();
4502 }
4503
4504 words
4505 }
4506}
4507
4508pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of this string, in order
    /// (a case-insensitive subsequence match).
4510 pub fuzzy_contents: Option<&'a str>,
4511 /// Skips words that start with a digit.
4512 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4514 pub range: Range<usize>,
4515}
4516
4517fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4518 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4519}
4520
4521fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4522 let mut result = IndentSize::spaces(0);
4523 for c in text {
4524 let kind = match c {
4525 ' ' => IndentKind::Space,
4526 '\t' => IndentKind::Tab,
4527 _ => break,
4528 };
4529 if result.len == 0 {
4530 result.kind = kind;
4531 }
4532 result.len += 1;
4533 }
4534 result
4535}
4536
4537impl Clone for BufferSnapshot {
4538 fn clone(&self) -> Self {
4539 Self {
4540 text: self.text.clone(),
4541 syntax: self.syntax.clone(),
4542 file: self.file.clone(),
4543 remote_selections: self.remote_selections.clone(),
4544 diagnostics: self.diagnostics.clone(),
4545 language: self.language.clone(),
4546 non_text_state_update_count: self.non_text_state_update_count,
4547 }
4548 }
4549}
4550
4551impl Deref for BufferSnapshot {
4552 type Target = text::BufferSnapshot;
4553
4554 fn deref(&self) -> &Self::Target {
4555 &self.text
4556 }
4557}
4558
4559unsafe impl Send for BufferChunks<'_> {}
4560
4561impl<'a> BufferChunks<'a> {
4562 pub(crate) fn new(
4563 text: &'a Rope,
4564 range: Range<usize>,
4565 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4566 diagnostics: bool,
4567 buffer_snapshot: Option<&'a BufferSnapshot>,
4568 ) -> Self {
4569 let mut highlights = None;
4570 if let Some((captures, highlight_maps)) = syntax {
4571 highlights = Some(BufferChunkHighlights {
4572 captures,
4573 next_capture: None,
4574 stack: Default::default(),
4575 highlight_maps,
4576 })
4577 }
4578
4579 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4580 let chunks = text.chunks_in_range(range.clone());
4581
4582 let mut this = BufferChunks {
4583 range,
4584 buffer_snapshot,
4585 chunks,
4586 diagnostic_endpoints,
4587 error_depth: 0,
4588 warning_depth: 0,
4589 information_depth: 0,
4590 hint_depth: 0,
4591 unnecessary_depth: 0,
4592 underline: true,
4593 highlights,
4594 };
4595 this.initialize_diagnostic_endpoints();
4596 this
4597 }
4598
    /// Seeks to the given byte range in the buffer.
4600 pub fn seek(&mut self, range: Range<usize>) {
4601 let old_range = std::mem::replace(&mut self.range, range.clone());
4602 self.chunks.set_range(self.range.clone());
4603 if let Some(highlights) = self.highlights.as_mut() {
4604 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4605 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4606 highlights
4607 .stack
4608 .retain(|(end_offset, _)| *end_offset > range.start);
4609 if let Some(capture) = &highlights.next_capture
4610 && range.start >= capture.node.start_byte()
4611 {
4612 let next_capture_end = capture.node.end_byte();
4613 if range.start < next_capture_end {
4614 highlights.stack.push((
4615 next_capture_end,
4616 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4617 ));
4618 }
4619 highlights.next_capture.take();
4620 }
4621 } else if let Some(snapshot) = self.buffer_snapshot {
4622 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4623 *highlights = BufferChunkHighlights {
4624 captures,
4625 next_capture: None,
4626 stack: Default::default(),
4627 highlight_maps,
4628 };
4629 } else {
4630 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4631 // Seeking such BufferChunks is not supported.
4632 debug_assert!(
4633 false,
4634 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4635 );
4636 }
4637
4638 highlights.captures.set_byte_range(self.range.clone());
4639 self.initialize_diagnostic_endpoints();
4640 }
4641 }
4642
4643 fn initialize_diagnostic_endpoints(&mut self) {
4644 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4645 && let Some(buffer) = self.buffer_snapshot
4646 {
4647 let mut diagnostic_endpoints = Vec::new();
4648 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4649 diagnostic_endpoints.push(DiagnosticEndpoint {
4650 offset: entry.range.start,
4651 is_start: true,
4652 severity: entry.diagnostic.severity,
4653 is_unnecessary: entry.diagnostic.is_unnecessary,
4654 underline: entry.diagnostic.underline,
4655 });
4656 diagnostic_endpoints.push(DiagnosticEndpoint {
4657 offset: entry.range.end,
4658 is_start: false,
4659 severity: entry.diagnostic.severity,
4660 is_unnecessary: entry.diagnostic.is_unnecessary,
4661 underline: entry.diagnostic.underline,
4662 });
4663 }
4664 diagnostic_endpoints
4665 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4666 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4667 self.hint_depth = 0;
4668 self.error_depth = 0;
4669 self.warning_depth = 0;
4670 self.information_depth = 0;
4671 }
4672 }
4673
4674 /// The current byte offset in the buffer.
4675 pub fn offset(&self) -> usize {
4676 self.range.start
4677 }
4678
4679 pub fn range(&self) -> Range<usize> {
4680 self.range.clone()
4681 }
4682
4683 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4684 let depth = match endpoint.severity {
4685 DiagnosticSeverity::ERROR => &mut self.error_depth,
4686 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4687 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4688 DiagnosticSeverity::HINT => &mut self.hint_depth,
4689 _ => return,
4690 };
4691 if endpoint.is_start {
4692 *depth += 1;
4693 } else {
4694 *depth -= 1;
4695 }
4696
4697 if endpoint.is_unnecessary {
4698 if endpoint.is_start {
4699 self.unnecessary_depth += 1;
4700 } else {
4701 self.unnecessary_depth -= 1;
4702 }
4703 }
4704 }
4705
4706 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4707 if self.error_depth > 0 {
4708 Some(DiagnosticSeverity::ERROR)
4709 } else if self.warning_depth > 0 {
4710 Some(DiagnosticSeverity::WARNING)
4711 } else if self.information_depth > 0 {
4712 Some(DiagnosticSeverity::INFORMATION)
4713 } else if self.hint_depth > 0 {
4714 Some(DiagnosticSeverity::HINT)
4715 } else {
4716 None
4717 }
4718 }
4719
4720 fn current_code_is_unnecessary(&self) -> bool {
4721 self.unnecessary_depth > 0
4722 }
4723}
4724
4725impl<'a> Iterator for BufferChunks<'a> {
4726 type Item = Chunk<'a>;
4727
4728 fn next(&mut self) -> Option<Self::Item> {
4729 let mut next_capture_start = usize::MAX;
4730 let mut next_diagnostic_endpoint = usize::MAX;
4731
4732 if let Some(highlights) = self.highlights.as_mut() {
4733 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4734 if *parent_capture_end <= self.range.start {
4735 highlights.stack.pop();
4736 } else {
4737 break;
4738 }
4739 }
4740
4741 if highlights.next_capture.is_none() {
4742 highlights.next_capture = highlights.captures.next();
4743 }
4744
4745 while let Some(capture) = highlights.next_capture.as_ref() {
4746 if self.range.start < capture.node.start_byte() {
4747 next_capture_start = capture.node.start_byte();
4748 break;
4749 } else {
4750 let highlight_id =
4751 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4752 highlights
4753 .stack
4754 .push((capture.node.end_byte(), highlight_id));
4755 highlights.next_capture = highlights.captures.next();
4756 }
4757 }
4758 }
4759
4760 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4761 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4762 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4763 if endpoint.offset <= self.range.start {
4764 self.update_diagnostic_depths(endpoint);
4765 diagnostic_endpoints.next();
4766 self.underline = endpoint.underline;
4767 } else {
4768 next_diagnostic_endpoint = endpoint.offset;
4769 break;
4770 }
4771 }
4772 }
4773 self.diagnostic_endpoints = diagnostic_endpoints;
4774
4775 if let Some(chunk) = self.chunks.peek() {
4776 let chunk_start = self.range.start;
4777 let mut chunk_end = (self.chunks.offset() + chunk.len())
4778 .min(next_capture_start)
4779 .min(next_diagnostic_endpoint);
4780 let mut highlight_id = None;
4781 if let Some(highlights) = self.highlights.as_ref()
4782 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4783 {
4784 chunk_end = chunk_end.min(*parent_capture_end);
4785 highlight_id = Some(*parent_highlight_id);
4786 }
4787
4788 let slice =
4789 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4790 self.range.start = chunk_end;
4791 if self.range.start == self.chunks.offset() + chunk.len() {
4792 self.chunks.next().unwrap();
4793 }
4794
4795 Some(Chunk {
4796 text: slice,
4797 syntax_highlight_id: highlight_id,
4798 underline: self.underline,
4799 diagnostic_severity: self.current_diagnostic_severity(),
4800 is_unnecessary: self.current_code_is_unnecessary(),
4801 ..Chunk::default()
4802 })
4803 } else {
4804 None
4805 }
4806 }
4807}
4808
4809impl operation_queue::Operation for Operation {
4810 fn lamport_timestamp(&self) -> clock::Lamport {
4811 match self {
4812 Operation::Buffer(_) => {
4813 unreachable!("buffer operations should never be deferred at this layer")
4814 }
4815 Operation::UpdateDiagnostics {
4816 lamport_timestamp, ..
4817 }
4818 | Operation::UpdateSelections {
4819 lamport_timestamp, ..
4820 }
4821 | Operation::UpdateCompletionTriggers {
4822 lamport_timestamp, ..
4823 } => *lamport_timestamp,
4824 }
4825 }
4826}
4827
4828impl Default for Diagnostic {
4829 fn default() -> Self {
4830 Self {
4831 source: Default::default(),
4832 source_kind: DiagnosticSourceKind::Other,
4833 code: None,
4834 code_description: None,
4835 severity: DiagnosticSeverity::ERROR,
4836 message: Default::default(),
4837 markdown: None,
4838 group_id: 0,
4839 is_primary: false,
4840 is_disk_based: false,
4841 is_unnecessary: false,
4842 underline: true,
4843 data: None,
4844 }
4845 }
4846}
4847
4848impl IndentSize {
4849 /// Returns an [`IndentSize`] representing the given spaces.
4850 pub fn spaces(len: u32) -> Self {
4851 Self {
4852 len,
4853 kind: IndentKind::Space,
4854 }
4855 }
4856
4857 /// Returns an [`IndentSize`] representing a tab.
4858 pub fn tab() -> Self {
4859 Self {
4860 len: 1,
4861 kind: IndentKind::Tab,
4862 }
4863 }
4864
4865 /// An iterator over the characters represented by this [`IndentSize`].
4866 pub fn chars(&self) -> impl Iterator<Item = char> {
4867 iter::repeat(self.char()).take(self.len as usize)
4868 }
4869
4870 /// The character representation of this [`IndentSize`].
4871 pub fn char(&self) -> char {
4872 match self.kind {
4873 IndentKind::Space => ' ',
4874 IndentKind::Tab => '\t',
4875 }
4876 }
4877
4878 /// Consumes the current [`IndentSize`] and returns a new one that has
4879 /// been shrunk or enlarged by the given size along the given direction.
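    ///
    /// A minimal usage sketch (illustrative only; assumes the `len` field is
    /// visible to the caller):
    ///
    /// ```ignore
    /// // Growing a 4-space indent by another 4 spaces yields an 8-space indent.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```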
4880 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4881 match direction {
4882 Ordering::Less => {
4883 if self.kind == size.kind && self.len >= size.len {
4884 self.len -= size.len;
4885 }
4886 }
4887 Ordering::Equal => {}
4888 Ordering::Greater => {
4889 if self.len == 0 {
4890 self = size;
4891 } else if self.kind == size.kind {
4892 self.len += size.len;
4893 }
4894 }
4895 }
4896 self
4897 }
4898
4899 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4900 match self.kind {
4901 IndentKind::Space => self.len as usize,
4902 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4903 }
4904 }
4905}
4906
4907#[cfg(any(test, feature = "test-support"))]
4908pub struct TestFile {
4909 pub path: Arc<Path>,
4910 pub root_name: String,
4911 pub local_root: Option<PathBuf>,
4912}
4913
4914#[cfg(any(test, feature = "test-support"))]
4915impl File for TestFile {
4916 fn path(&self) -> &Arc<Path> {
4917 &self.path
4918 }
4919
4920 fn full_path(&self, _: &gpui::App) -> PathBuf {
4921 PathBuf::from(&self.root_name).join(self.path.as_ref())
4922 }
4923
4924 fn as_local(&self) -> Option<&dyn LocalFile> {
4925 if self.local_root.is_some() {
4926 Some(self)
4927 } else {
4928 None
4929 }
4930 }
4931
4932 fn disk_state(&self) -> DiskState {
4933 unimplemented!()
4934 }
4935
4936 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4937 self.path().file_name().unwrap_or(self.root_name.as_ref())
4938 }
4939
4940 fn worktree_id(&self, _: &App) -> WorktreeId {
4941 WorktreeId::from_usize(0)
4942 }
4943
4944 fn to_proto(&self, _: &App) -> rpc::proto::File {
4945 unimplemented!()
4946 }
4947
4948 fn is_private(&self) -> bool {
4949 false
4950 }
4951}
4952
4953#[cfg(any(test, feature = "test-support"))]
4954impl LocalFile for TestFile {
4955 fn abs_path(&self, _cx: &App) -> PathBuf {
4956 PathBuf::from(self.local_root.as_ref().unwrap())
4957 .join(&self.root_name)
4958 .join(self.path.as_ref())
4959 }
4960
4961 fn load(&self, _cx: &App) -> Task<Result<String>> {
4962 unimplemented!()
4963 }
4964
4965 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4966 unimplemented!()
4967 }
4968}
4969
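/// Groups an ascending sequence of `u32` values (e.g. row numbers) into contiguous
/// ranges, starting a new range whenever the sequence skips a value or a range
/// reaches `max_len`.
///
/// A minimal usage sketch (illustrative only):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```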
4970pub(crate) fn contiguous_ranges(
4971 values: impl Iterator<Item = u32>,
4972 max_len: usize,
4973) -> impl Iterator<Item = Range<u32>> {
4974 let mut values = values;
4975 let mut current_range: Option<Range<u32>> = None;
4976 std::iter::from_fn(move || {
4977 loop {
4978 if let Some(value) = values.next() {
4979 if let Some(range) = &mut current_range
4980 && value == range.end
4981 && range.len() < max_len
4982 {
4983 range.end += 1;
4984 continue;
4985 }
4986
4987 let prev_range = current_range.clone();
4988 current_range = Some(value..(value + 1));
4989 if prev_range.is_some() {
4990 return prev_range;
4991 }
4992 } else {
4993 return current_range.take();
4994 }
4995 }
4996 })
4997}
4998
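/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// taking language-specific word characters from a [`LanguageScope`] into account.
///
/// A minimal usage sketch (illustrative only):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```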
4999#[derive(Default, Debug)]
5000pub struct CharClassifier {
5001 scope: Option<LanguageScope>,
5002 for_completion: bool,
5003 ignore_punctuation: bool,
5004}
5005
5006impl CharClassifier {
5007 pub fn new(scope: Option<LanguageScope>) -> Self {
5008 Self {
5009 scope,
5010 for_completion: false,
5011 ignore_punctuation: false,
5012 }
5013 }
5014
5015 pub fn for_completion(self, for_completion: bool) -> Self {
5016 Self {
5017 for_completion,
5018 ..self
5019 }
5020 }
5021
5022 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5023 Self {
5024 ignore_punctuation,
5025 ..self
5026 }
5027 }
5028
5029 pub fn is_whitespace(&self, c: char) -> bool {
5030 self.kind(c) == CharKind::Whitespace
5031 }
5032
5033 pub fn is_word(&self, c: char) -> bool {
5034 self.kind(c) == CharKind::Word
5035 }
5036
5037 pub fn is_punctuation(&self, c: char) -> bool {
5038 self.kind(c) == CharKind::Punctuation
5039 }
5040
5041 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5042 if c.is_alphanumeric() || c == '_' {
5043 return CharKind::Word;
5044 }
5045
5046 if let Some(scope) = &self.scope {
5047 let characters = if self.for_completion {
5048 scope.completion_query_characters()
5049 } else {
5050 scope.word_characters()
5051 };
5052 if let Some(characters) = characters
5053 && characters.contains(&c)
5054 {
5055 return CharKind::Word;
5056 }
5057 }
5058
5059 if c.is_whitespace() {
5060 return CharKind::Whitespace;
5061 }
5062
5063 if ignore_punctuation {
5064 CharKind::Word
5065 } else {
5066 CharKind::Punctuation
5067 }
5068 }
5069
5070 pub fn kind(&self, c: char) -> CharKind {
5071 self.kind_with(c, self.ignore_punctuation)
5072 }
5073}
5074
5075/// Find all of the ranges of whitespace that occur at the ends of lines
5076/// in the given rope.
5077///
5078/// This could also be done with a regex search, but this implementation
5079/// avoids copying text.
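///
/// A minimal usage sketch (illustrative only; assumes `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..17]);
/// ```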
5080pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5081 let mut ranges = Vec::new();
5082
5083 let mut offset = 0;
5084 let mut prev_chunk_trailing_whitespace_range = 0..0;
5085 for chunk in rope.chunks() {
5086 let mut prev_line_trailing_whitespace_range = 0..0;
5087 for (i, line) in chunk.split('\n').enumerate() {
5088 let line_end_offset = offset + line.len();
5089 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5090 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5091
5092 if i == 0 && trimmed_line_len == 0 {
5093 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5094 }
5095 if !prev_line_trailing_whitespace_range.is_empty() {
5096 ranges.push(prev_line_trailing_whitespace_range);
5097 }
5098
5099 offset = line_end_offset + 1;
5100 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5101 }
5102
5103 offset -= 1;
5104 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5105 }
5106
5107 if !prev_chunk_trailing_whitespace_range.is_empty() {
5108 ranges.push(prev_chunk_trailing_whitespace_range);
5109 }
5110
5111 ranges
5112}