1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::{SettingsUi, WorktreeId};
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(
177 Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
178)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underline,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<NumberOrString>,
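    /// A URI that links to further documentation about this diagnostic's code,
    /// when provided by the language server.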
207 pub code_description: Option<lsp::Uri>,
208 /// Whether this diagnostic is a hint, warning, or error.
209 pub severity: DiagnosticSeverity,
210 /// The human-readable message associated with this diagnostic.
211 pub message: String,
    /// The human-readable message, in Markdown format, if available.
213 pub markdown: Option<String>,
214 /// An id that identifies the group to which this diagnostic belongs.
215 ///
216 /// When a language server produces a diagnostic with
217 /// one or more associated diagnostics, those diagnostics are all
218 /// assigned a single group ID.
219 pub group_id: usize,
220 /// Whether this diagnostic is the primary diagnostic for its group.
221 ///
222 /// In a given group, the primary diagnostic is the top-level diagnostic
223 /// returned by the language server. The non-primary diagnostics are the
224 /// associated diagnostics.
225 pub is_primary: bool,
226 /// Whether this diagnostic is considered to originate from an analysis of
227 /// files on disk, as opposed to any unsaved buffer contents. This is a
228 /// property of a given diagnostic source, and is configured for a given
229 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
230 /// for the language server.
231 pub is_disk_based: bool,
232 /// Whether this diagnostic marks unnecessary code.
233 pub is_unnecessary: bool,
    /// A coarse classification of this diagnostic based on how it was produced.
235 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the server when code actions are requested for this diagnostic.
237 pub data: Option<Value>,
238 /// Whether to underline the corresponding text range in the editor.
239 pub underline: bool,
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
243pub enum DiagnosticSourceKind {
244 Pulled,
245 Pushed,
246 Other,
247}
248
249/// An operation used to synchronize this buffer with its other replicas.
250#[derive(Clone, Debug, PartialEq)]
251pub enum Operation {
252 /// A text operation.
253 Buffer(text::Operation),
254
255 /// An update to the buffer's diagnostics.
256 UpdateDiagnostics {
257 /// The id of the language server that produced the new diagnostics.
258 server_id: LanguageServerId,
259 /// The diagnostics.
260 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
261 /// The buffer's lamport timestamp.
262 lamport_timestamp: clock::Lamport,
263 },
264
265 /// An update to the most recent selections in this buffer.
266 UpdateSelections {
267 /// The selections.
268 selections: Arc<[Selection<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// Whether the selections are in 'line mode'.
272 line_mode: bool,
273 /// The [`CursorShape`] associated with these selections.
274 cursor_shape: CursorShape,
275 },
276
277 /// An update to the characters that should trigger autocompletion
278 /// for this buffer.
279 UpdateCompletionTriggers {
280 /// The characters that trigger autocompletion.
281 triggers: Vec<String>,
282 /// The buffer's lamport timestamp.
283 lamport_timestamp: clock::Lamport,
284 /// The language server ID.
285 server_id: LanguageServerId,
286 },
287}
288
289/// An event that occurs in a buffer.
290#[derive(Clone, Debug, PartialEq)]
291pub enum BufferEvent {
292 /// The buffer was changed in a way that must be
293 /// propagated to its other replicas.
294 Operation {
295 operation: Operation,
296 is_local: bool,
297 },
298 /// The buffer was edited.
299 Edited,
300 /// The buffer's `dirty` bit changed.
301 DirtyChanged,
302 /// The buffer was saved.
303 Saved,
304 /// The buffer's file was changed on disk.
305 FileHandleChanged,
306 /// The buffer was reloaded.
307 Reloaded,
    /// The buffer needs to be reloaded.
309 ReloadNeeded,
310 /// The buffer's language was changed.
311 LanguageChanged,
312 /// The buffer's syntax trees were updated.
313 Reparsed,
314 /// The buffer's diagnostics were updated.
315 DiagnosticsUpdated,
316 /// The buffer gained or lost editing capabilities.
317 CapabilityChanged,
318 /// The buffer was explicitly requested to close.
319 Closed,
320 /// The buffer was discarded when closing.
321 Discarded,
322}
323
324/// The file associated with a buffer.
325pub trait File: Send + Sync + Any {
326 /// Returns the [`LocalFile`] associated with this file, if the
327 /// file is local.
328 fn as_local(&self) -> Option<&dyn LocalFile>;
329
330 /// Returns whether this file is local.
331 fn is_local(&self) -> bool {
332 self.as_local().is_some()
333 }
334
335 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
336 /// only available in some states, such as modification time.
337 fn disk_state(&self) -> DiskState;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &App) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self, cx: &App) -> WorktreeId;
354
355 /// Converts this file into a protobuf message.
356 fn to_proto(&self, cx: &App) -> rpc::proto::File;
357
    /// Returns whether Zed considers this to be a private file.
359 fn is_private(&self) -> bool;
360}
361
362/// The file's storage status - whether it's stored (`Present`), and if so when it was last
363/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
364/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
365/// indicator for new files.
366#[derive(Copy, Clone, Debug, PartialEq)]
367pub enum DiskState {
368 /// File created in Zed that has not been saved.
369 New,
370 /// File present on the filesystem.
371 Present { mtime: MTime },
372 /// Deleted file that was previously present.
373 Deleted,
374}
375
376impl DiskState {
377 /// Returns the file's last known modification time on disk.
378 pub fn mtime(self) -> Option<MTime> {
379 match self {
380 DiskState::New => None,
381 DiskState::Present { mtime } => Some(mtime),
382 DiskState::Deleted => None,
383 }
384 }
385
386 pub fn exists(&self) -> bool {
387 match self {
388 DiskState::New => false,
389 DiskState::Present { .. } => true,
390 DiskState::Deleted => false,
391 }
392 }
393}
394
395/// The file associated with a buffer, in the case where the file is on the local disk.
396pub trait LocalFile: File {
    /// Returns the absolute path of this file.
398 fn abs_path(&self, cx: &App) -> PathBuf;
399
400 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
401 fn load(&self, cx: &App) -> Task<Result<String>>;
402
403 /// Loads the file's contents from disk.
404 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
405}
406
407/// The auto-indent behavior associated with an editing operation.
408/// For some editing operations, each affected line of text has its
409/// indentation recomputed. For other operations, the entire block
410/// of edited text is adjusted uniformly.
411#[derive(Clone, Debug)]
412pub enum AutoindentMode {
413 /// Indent each line of inserted text.
414 EachLine,
415 /// Apply the same indentation adjustment to all of the lines
416 /// in a given insertion.
417 Block {
418 /// The original indentation column of the first line of each
419 /// insertion, if it has been copied.
420 ///
421 /// Knowing this makes it possible to preserve the relative indentation
422 /// of every line in the insertion from when it was copied.
423 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns.
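        ///
        /// For example, if text was copied from column 4 (`a == 4`) and its first
        /// line is auto-indented to column 8 (`b == 8`), the remaining lines of
        /// the insertion are each shifted right by 4 columns, preserving their
        /// relative indentation.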
427 original_indent_columns: Vec<Option<u32>>,
428 },
429}
430
431#[derive(Clone)]
432struct AutoindentRequest {
433 before_edit: BufferSnapshot,
434 entries: Vec<AutoindentRequestEntry>,
435 is_block_mode: bool,
436 ignore_empty_lines: bool,
437}
438
439#[derive(Debug, Clone)]
440struct AutoindentRequestEntry {
441 /// A range of the buffer whose indentation should be adjusted.
442 range: Range<Anchor>,
443 /// Whether or not these lines should be considered brand new, for the
444 /// purpose of auto-indent. When text is not new, its indentation will
445 /// only be adjusted if the suggested indentation level has *changed*
446 /// since the edit was made.
447 first_line_is_new: bool,
448 indent_size: IndentSize,
449 original_indent_column: Option<u32>,
450}
451
452#[derive(Debug)]
453struct IndentSuggestion {
454 basis_row: u32,
455 delta: Ordering,
456 within_error: bool,
457}
458
459struct BufferChunkHighlights<'a> {
460 captures: SyntaxMapCaptures<'a>,
461 next_capture: Option<SyntaxMapCapture<'a>>,
462 stack: Vec<(usize, HighlightId)>,
463 highlight_maps: Vec<HighlightMap>,
464}
465
466/// An iterator that yields chunks of a buffer's text, along with their
467/// syntax highlights and diagnostic status.
468pub struct BufferChunks<'a> {
469 buffer_snapshot: Option<&'a BufferSnapshot>,
470 range: Range<usize>,
471 chunks: text::Chunks<'a>,
472 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
473 error_depth: usize,
474 warning_depth: usize,
475 information_depth: usize,
476 hint_depth: usize,
477 unnecessary_depth: usize,
478 underline: bool,
479 highlights: Option<BufferChunkHighlights<'a>>,
480}
481
482/// A chunk of a buffer's text, along with its syntax highlight and
483/// diagnostic status.
484#[derive(Clone, Debug, Default)]
485pub struct Chunk<'a> {
486 /// The text of the chunk.
487 pub text: &'a str,
488 /// The syntax highlighting style of the chunk.
489 pub syntax_highlight_id: Option<HighlightId>,
490 /// The highlight style that has been applied to this chunk in
491 /// the editor.
492 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
494 pub diagnostic_severity: Option<DiagnosticSeverity>,
495 /// Whether this chunk of text is marked as unnecessary.
496 pub is_unnecessary: bool,
497 /// Whether this chunk of text was originally a tab character.
498 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay rather than from the buffer's text.
500 pub is_inlay: bool,
501 /// Whether to underline the corresponding text range in the editor.
502 pub underline: bool,
503}
504
505/// A set of edits to a given version of a buffer, computed asynchronously.
506#[derive(Debug)]
507pub struct Diff {
508 pub base_version: clock::Global,
509 pub line_ending: LineEnding,
510 pub edits: Vec<(Range<usize>, Arc<str>)>,
511}
512
513#[derive(Debug, Clone, Copy)]
514pub(crate) struct DiagnosticEndpoint {
515 offset: usize,
516 is_start: bool,
517 underline: bool,
518 severity: DiagnosticSeverity,
519 is_unnecessary: bool,
520}
521
522/// A class of characters, used for characterizing a run of text.
523#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
524pub enum CharKind {
525 /// Whitespace.
526 Whitespace,
527 /// Punctuation.
528 Punctuation,
529 /// Word.
530 Word,
531}
532
/// A runnable is a set of data about a buffer region that can be resolved into a task.
534pub struct Runnable {
535 pub tags: SmallVec<[RunnableTag; 1]>,
536 pub language: Arc<Language>,
537 pub buffer: BufferId,
538}
539
540#[derive(Default, Clone, Debug)]
541pub struct HighlightedText {
542 pub text: SharedString,
543 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
544}
545
546#[derive(Default, Debug)]
547struct HighlightedTextBuilder {
548 pub text: String,
549 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
550}
551
552impl HighlightedText {
553 pub fn from_buffer_range<T: ToOffset>(
554 range: Range<T>,
555 snapshot: &text::BufferSnapshot,
556 syntax_snapshot: &SyntaxSnapshot,
557 override_style: Option<HighlightStyle>,
558 syntax_theme: &SyntaxTheme,
559 ) -> Self {
560 let mut highlighted_text = HighlightedTextBuilder::default();
561 highlighted_text.add_text_from_buffer_range(
562 range,
563 snapshot,
564 syntax_snapshot,
565 override_style,
566 syntax_theme,
567 );
568 highlighted_text.build()
569 }
570
571 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
572 gpui::StyledText::new(self.text.clone())
573 .with_default_highlights(default_style, self.highlights.iter().cloned())
574 }
575
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
578 pub fn first_line_preview(self) -> (Self, bool) {
579 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
580 let first_line = &self.text[..newline_ix];
581
582 // Trim leading whitespace, unless an edit starts prior to it.
583 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
584 if let Some((first_highlight_range, _)) = self.highlights.first() {
585 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
586 }
587
588 let preview_text = &first_line[preview_start_ix..];
589 let preview_highlights = self
590 .highlights
591 .into_iter()
592 .take_while(|(range, _)| range.start < newline_ix)
593 .filter_map(|(mut range, highlight)| {
594 range.start = range.start.saturating_sub(preview_start_ix);
595 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
596 if range.is_empty() {
597 None
598 } else {
599 Some((range, highlight))
600 }
601 });
602
603 let preview = Self {
604 text: SharedString::new(preview_text),
605 highlights: preview_highlights.collect(),
606 };
607
608 (preview, self.text.len() > newline_ix)
609 }
610}
611
612impl HighlightedTextBuilder {
613 pub fn build(self) -> HighlightedText {
614 HighlightedText {
615 text: self.text.into(),
616 highlights: self.highlights,
617 }
618 }
619
620 pub fn add_text_from_buffer_range<T: ToOffset>(
621 &mut self,
622 range: Range<T>,
623 snapshot: &text::BufferSnapshot,
624 syntax_snapshot: &SyntaxSnapshot,
625 override_style: Option<HighlightStyle>,
626 syntax_theme: &SyntaxTheme,
627 ) {
628 let range = range.to_offset(snapshot);
629 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
630 let start = self.text.len();
631 self.text.push_str(chunk.text);
632 let end = self.text.len();
633
634 if let Some(mut highlight_style) = chunk
635 .syntax_highlight_id
636 .and_then(|id| id.style(syntax_theme))
637 {
638 if let Some(override_style) = override_style {
639 highlight_style.highlight(override_style);
640 }
641 self.highlights.push((start..end, highlight_style));
642 } else if let Some(override_style) = override_style {
643 self.highlights.push((start..end, override_style));
644 }
645 }
646 }
647
648 fn highlighted_chunks<'a>(
649 range: Range<usize>,
650 snapshot: &'a text::BufferSnapshot,
651 syntax_snapshot: &'a SyntaxSnapshot,
652 ) -> BufferChunks<'a> {
653 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
654 grammar.highlights_query.as_ref()
655 });
656
657 let highlight_maps = captures
658 .grammars()
659 .iter()
660 .map(|grammar| grammar.highlight_map())
661 .collect();
662
663 BufferChunks::new(
664 snapshot.as_rope(),
665 range,
666 Some((captures, highlight_maps)),
667 false,
668 None,
669 )
670 }
671}
672
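/// A pair of text snapshots, captured before and after applying a set of proposed
/// edits, that can be used to render a syntax-highlighted preview of those edits
/// without modifying the underlying buffer.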
673#[derive(Clone)]
674pub struct EditPreview {
675 old_snapshot: text::BufferSnapshot,
676 applied_edits_snapshot: text::BufferSnapshot,
677 syntax_snapshot: SyntaxSnapshot,
678}
679
680impl EditPreview {
681 pub fn highlight_edits(
682 &self,
683 current_snapshot: &BufferSnapshot,
684 edits: &[(Range<Anchor>, String)],
685 include_deletions: bool,
686 cx: &App,
687 ) -> HighlightedText {
688 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
689 return HighlightedText::default();
690 };
691
692 let mut highlighted_text = HighlightedTextBuilder::default();
693
694 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
695
696 let insertion_highlight_style = HighlightStyle {
697 background_color: Some(cx.theme().status().created_background),
698 ..Default::default()
699 };
700 let deletion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().deleted_background),
702 ..Default::default()
703 };
704 let syntax_theme = cx.theme().syntax();
705
706 for (range, edit_text) in edits {
707 let edit_new_end_in_preview_snapshot = range
708 .end
709 .bias_right(&self.old_snapshot)
710 .to_offset(&self.applied_edits_snapshot);
711 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
712
713 let unchanged_range_in_preview_snapshot =
714 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
715 if !unchanged_range_in_preview_snapshot.is_empty() {
716 highlighted_text.add_text_from_buffer_range(
717 unchanged_range_in_preview_snapshot,
718 &self.applied_edits_snapshot,
719 &self.syntax_snapshot,
720 None,
721 syntax_theme,
722 );
723 }
724
725 let range_in_current_snapshot = range.to_offset(current_snapshot);
726 if include_deletions && !range_in_current_snapshot.is_empty() {
727 highlighted_text.add_text_from_buffer_range(
728 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
731 Some(deletion_highlight_style),
732 syntax_theme,
733 );
734 }
735
736 if !edit_text.is_empty() {
737 highlighted_text.add_text_from_buffer_range(
738 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
739 &self.applied_edits_snapshot,
740 &self.syntax_snapshot,
741 Some(insertion_highlight_style),
742 syntax_theme,
743 );
744 }
745
746 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
747 }
748
749 highlighted_text.add_text_from_buffer_range(
750 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
751 &self.applied_edits_snapshot,
752 &self.syntax_snapshot,
753 None,
754 syntax_theme,
755 );
756
757 highlighted_text.build()
758 }
759
760 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
761 let (first, _) = edits.first()?;
762 let (last, _) = edits.last()?;
763
764 let start = first
765 .start
766 .bias_left(&self.old_snapshot)
767 .to_point(&self.applied_edits_snapshot);
768 let end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
774 let range = Point::new(start.row, 0)
775 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
776
777 Some(range.to_offset(&self.applied_edits_snapshot))
778 }
779}
780
781#[derive(Clone, Debug, PartialEq, Eq)]
782pub struct BracketMatch {
783 pub open_range: Range<usize>,
784 pub close_range: Range<usize>,
785 pub newline_only: bool,
786}
787
788impl Buffer {
789 /// Create a new buffer with the given base text.
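    ///
    /// A rough usage sketch (not compiled as a doctest); assumes an entity
    /// context is available, as in tests:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```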
790 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
791 Self::build(
792 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
793 None,
794 Capability::ReadWrite,
795 )
796 }
797
798 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
799 pub fn local_normalized(
800 base_text_normalized: Rope,
801 line_ending: LineEnding,
802 cx: &Context<Self>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new_normalized(
806 0,
807 cx.entity_id().as_non_zero_u64().into(),
808 line_ending,
809 base_text_normalized,
810 ),
811 None,
812 Capability::ReadWrite,
813 )
814 }
815
816 /// Create a new buffer that is a replica of a remote buffer.
817 pub fn remote(
818 remote_id: BufferId,
819 replica_id: ReplicaId,
820 capability: Capability,
821 base_text: impl Into<String>,
822 ) -> Self {
823 Self::build(
824 TextBuffer::new(replica_id, remote_id, base_text.into()),
825 None,
826 capability,
827 )
828 }
829
830 /// Create a new buffer that is a replica of a remote buffer, populating its
831 /// state from the given protobuf message.
832 pub fn from_proto(
833 replica_id: ReplicaId,
834 capability: Capability,
835 message: proto::BufferState,
836 file: Option<Arc<dyn File>>,
837 ) -> Result<Self> {
838 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
839 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
840 let mut this = Self::build(buffer, file, capability);
841 this.text.set_line_ending(proto::deserialize_line_ending(
842 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
843 ));
844 this.saved_version = proto::deserialize_version(&message.saved_version);
845 this.saved_mtime = message.saved_mtime.map(|time| time.into());
846 Ok(this)
847 }
848
849 /// Serialize the buffer's state to a protobuf message.
850 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
851 proto::BufferState {
852 id: self.remote_id().into(),
853 file: self.file.as_ref().map(|f| f.to_proto(cx)),
854 base_text: self.base_text().to_string(),
855 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
856 saved_version: proto::serialize_version(&self.saved_version),
857 saved_mtime: self.saved_mtime.map(|time| time.into()),
858 }
859 }
860
861 /// Serialize as protobufs all of the changes to the buffer since the given version.
862 pub fn serialize_ops(
863 &self,
864 since: Option<clock::Global>,
865 cx: &App,
866 ) -> Task<Vec<proto::Operation>> {
867 let mut operations = Vec::new();
868 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
869
870 operations.extend(self.remote_selections.iter().map(|(_, set)| {
871 proto::serialize_operation(&Operation::UpdateSelections {
872 selections: set.selections.clone(),
873 lamport_timestamp: set.lamport_timestamp,
874 line_mode: set.line_mode,
875 cursor_shape: set.cursor_shape,
876 })
877 }));
878
879 for (server_id, diagnostics) in &self.diagnostics {
880 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
881 lamport_timestamp: self.diagnostics_timestamp,
882 server_id: *server_id,
883 diagnostics: diagnostics.iter().cloned().collect(),
884 }));
885 }
886
887 for (server_id, completions) in &self.completion_triggers_per_language_server {
888 operations.push(proto::serialize_operation(
889 &Operation::UpdateCompletionTriggers {
890 triggers: completions.iter().cloned().collect(),
891 lamport_timestamp: self.completion_triggers_timestamp,
892 server_id: *server_id,
893 },
894 ));
895 }
896
897 let text_operations = self.text.operations().clone();
898 cx.background_spawn(async move {
899 let since = since.unwrap_or_default();
900 operations.extend(
901 text_operations
902 .iter()
903 .filter(|(_, op)| !since.observed(op.timestamp()))
904 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
905 );
906 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
907 operations
908 })
909 }
910
911 /// Assign a language to the buffer, returning the buffer.
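    ///
    /// A rough sketch (not compiled as a doctest) of constructing a buffer with a
    /// language already assigned; `text` and `language` are placeholders:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language, cx));
    /// ```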
912 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
913 self.set_language(Some(language), cx);
914 self
915 }
916
917 /// Returns the [`Capability`] of this buffer.
918 pub fn capability(&self) -> Capability {
919 self.capability
920 }
921
922 /// Whether this buffer can only be read.
923 pub fn read_only(&self) -> bool {
924 self.capability == Capability::ReadOnly
925 }
926
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
928 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
929 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
930 let snapshot = buffer.snapshot();
931 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
932 Self {
933 saved_mtime,
934 saved_version: buffer.version(),
935 preview_version: buffer.version(),
936 reload_task: None,
937 transaction_depth: 0,
938 was_dirty_before_starting_transaction: None,
939 has_unsaved_edits: Cell::new((buffer.version(), false)),
940 text: buffer,
941 branch_state: None,
942 file,
943 capability,
944 syntax_map,
945 reparse: None,
946 non_text_state_update_count: 0,
947 sync_parse_timeout: Duration::from_millis(1),
948 parse_status: watch::channel(ParseStatus::Idle),
949 autoindent_requests: Default::default(),
950 wait_for_autoindent_txs: Default::default(),
951 pending_autoindent: Default::default(),
952 language: None,
953 remote_selections: Default::default(),
954 diagnostics: Default::default(),
955 diagnostics_timestamp: Default::default(),
956 completion_triggers: Default::default(),
957 completion_triggers_per_language_server: Default::default(),
958 completion_triggers_timestamp: Default::default(),
959 deferred_ops: OperationQueue::new(),
960 has_conflict: false,
961 change_bits: Default::default(),
962 _subscriptions: Vec::new(),
963 }
964 }
965
966 pub fn build_snapshot(
967 text: Rope,
968 language: Option<Arc<Language>>,
969 language_registry: Option<Arc<LanguageRegistry>>,
970 cx: &mut App,
971 ) -> impl Future<Output = BufferSnapshot> + use<> {
972 let entity_id = cx.reserve_entity::<Self>().entity_id();
973 let buffer_id = entity_id.as_non_zero_u64().into();
974 async move {
975 let text =
976 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
977 let mut syntax = SyntaxMap::new(&text).snapshot();
978 if let Some(language) = language.clone() {
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 syntax.reparse(&text, language_registry, language);
1024 }
1025 BufferSnapshot {
1026 text,
1027 syntax,
1028 file: None,
1029 diagnostics: Default::default(),
1030 remote_selections: Default::default(),
1031 language,
1032 non_text_state_update_count: 0,
1033 }
1034 }
1035
1036 /// Retrieve a snapshot of the buffer's current state. This is computationally
1037 /// cheap, and allows reading from the buffer on a background thread.
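    ///
    /// A rough sketch (not compiled as a doctest) of reading from a snapshot off
    /// the main thread:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Snapshots are immutable, so this work doesn't block further edits.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     let text = snapshot.text();
    /// })
    /// .detach();
    /// ```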
1038 pub fn snapshot(&self) -> BufferSnapshot {
1039 let text = self.text.snapshot();
1040 let mut syntax_map = self.syntax_map.lock();
1041 syntax_map.interpolate(&text);
1042 let syntax = syntax_map.snapshot();
1043
1044 BufferSnapshot {
1045 text,
1046 syntax,
1047 file: self.file.clone(),
1048 remote_selections: self.remote_selections.clone(),
1049 diagnostics: self.diagnostics.clone(),
1050 language: self.language.clone(),
1051 non_text_state_update_count: self.non_text_state_update_count,
1052 }
1053 }
1054
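    /// Creates a branch of this buffer: a new buffer that starts from this buffer's
    /// current state and whose edits can later be merged back into this buffer via
    /// [`Buffer::merge_into_base`].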
1055 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1056 let this = cx.entity();
1057 cx.new(|cx| {
1058 let mut branch = Self {
1059 branch_state: Some(BufferBranchState {
1060 base_buffer: this.clone(),
1061 merged_operations: Default::default(),
1062 }),
1063 language: self.language.clone(),
1064 has_conflict: self.has_conflict,
1065 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1066 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1067 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1068 };
1069 if let Some(language_registry) = self.language_registry() {
1070 branch.set_language_registry(language_registry);
1071 }
1072
1073 // Reparse the branch buffer so that we get syntax highlighting immediately.
1074 branch.reparse(cx);
1075
1076 branch
1077 })
1078 }
1079
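    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// reparsing the edited text so that the preview can be syntax highlighted.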
1080 pub fn preview_edits(
1081 &self,
1082 edits: Arc<[(Range<Anchor>, String)]>,
1083 cx: &App,
1084 ) -> Task<EditPreview> {
1085 let registry = self.language_registry();
1086 let language = self.language().cloned();
1087 let old_snapshot = self.text.snapshot();
1088 let mut branch_buffer = self.text.branch();
1089 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1090 cx.background_spawn(async move {
1091 if !edits.is_empty() {
1092 if let Some(language) = language.clone() {
1093 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1094 }
1095
1096 branch_buffer.edit(edits.iter().cloned());
1097 let snapshot = branch_buffer.snapshot();
1098 syntax_snapshot.interpolate(&snapshot);
1099
1100 if let Some(language) = language {
1101 syntax_snapshot.reparse(&snapshot, registry, language);
1102 }
1103 }
1104 EditPreview {
1105 old_snapshot,
1106 applied_edits_snapshot: branch_buffer.snapshot(),
1107 syntax_snapshot,
1108 }
1109 })
1110 }
1111
1112 /// Applies all of the changes in this buffer that intersect any of the
1113 /// given `ranges` to its base buffer.
1114 ///
1115 /// If `ranges` is empty, then all changes will be applied. This buffer must
1116 /// be a branch buffer to call this method.
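    ///
    /// A rough sketch (not compiled as a doctest) of merging every change from a
    /// branch back into its base buffer:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```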
1117 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1118 let Some(base_buffer) = self.base_buffer() else {
1119 debug_panic!("not a branch buffer");
1120 return;
1121 };
1122
1123 let mut ranges = if ranges.is_empty() {
1124 &[0..usize::MAX]
1125 } else {
1126 ranges.as_slice()
1127 }
1128 .iter()
1129 .peekable();
1130
1131 let mut edits = Vec::new();
1132 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1133 let mut is_included = false;
1134 while let Some(range) = ranges.peek() {
1135 if range.end < edit.new.start {
1136 ranges.next().unwrap();
1137 } else {
1138 if range.start <= edit.new.end {
1139 is_included = true;
1140 }
1141 break;
1142 }
1143 }
1144
1145 if is_included {
1146 edits.push((
1147 edit.old.clone(),
1148 self.text_for_range(edit.new.clone()).collect::<String>(),
1149 ));
1150 }
1151 }
1152
1153 let operation = base_buffer.update(cx, |base_buffer, cx| {
1154 // cx.emit(BufferEvent::DiffBaseChanged);
1155 base_buffer.edit(edits, None, cx)
1156 });
1157
1158 if let Some(operation) = operation
1159 && let Some(BufferBranchState {
1160 merged_operations, ..
1161 }) = &mut self.branch_state
1162 {
1163 merged_operations.push(operation);
1164 }
1165 }
1166
1167 fn on_base_buffer_event(
1168 &mut self,
1169 _: Entity<Buffer>,
1170 event: &BufferEvent,
1171 cx: &mut Context<Self>,
1172 ) {
1173 let BufferEvent::Operation { operation, .. } = event else {
1174 return;
1175 };
1176 let Some(BufferBranchState {
1177 merged_operations, ..
1178 }) = &mut self.branch_state
1179 else {
1180 return;
1181 };
1182
1183 let mut operation_to_undo = None;
1184 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1185 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1186 {
1187 merged_operations.remove(ix);
1188 operation_to_undo = Some(operation.timestamp);
1189 }
1190
1191 self.apply_ops([operation.clone()], cx);
1192
1193 if let Some(timestamp) = operation_to_undo {
1194 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1195 self.undo_operations(counts, cx);
1196 }
1197 }
1198
1199 #[cfg(test)]
1200 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1201 &self.text
1202 }
1203
1204 /// Retrieve a snapshot of the buffer's raw text, without any
1205 /// language-related state like the syntax tree or diagnostics.
1206 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1207 self.text.snapshot()
1208 }
1209
1210 /// The file associated with the buffer, if any.
1211 pub fn file(&self) -> Option<&Arc<dyn File>> {
1212 self.file.as_ref()
1213 }
1214
1215 /// The version of the buffer that was last saved or reloaded from disk.
1216 pub fn saved_version(&self) -> &clock::Global {
1217 &self.saved_version
1218 }
1219
1220 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1221 pub fn saved_mtime(&self) -> Option<MTime> {
1222 self.saved_mtime
1223 }
1224
1225 /// Assign a language to the buffer.
1226 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1227 self.non_text_state_update_count += 1;
1228 self.syntax_map.lock().clear(&self.text);
1229 self.language = language;
1230 self.was_changed();
1231 self.reparse(cx);
1232 cx.emit(BufferEvent::LanguageChanged);
1233 }
1234
1235 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1236 /// other languages if parts of the buffer are written in different languages.
1237 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1238 self.syntax_map
1239 .lock()
1240 .set_language_registry(language_registry);
1241 }
1242
1243 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1244 self.syntax_map.lock().language_registry()
1245 }
1246
1247 /// Assign the buffer a new [`Capability`].
1248 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1249 self.capability = capability;
1250 cx.emit(BufferEvent::CapabilityChanged)
1251 }
1252
1253 /// This method is called to signal that the buffer has been saved.
1254 pub fn did_save(
1255 &mut self,
1256 version: clock::Global,
1257 mtime: Option<MTime>,
1258 cx: &mut Context<Self>,
1259 ) {
1260 self.saved_version = version;
1261 self.has_unsaved_edits
1262 .set((self.saved_version().clone(), false));
1263 self.has_conflict = false;
1264 self.saved_mtime = mtime;
1265 self.was_changed();
1266 cx.emit(BufferEvent::Saved);
1267 cx.notify();
1268 }
1269
1270 /// This method is called to signal that the buffer has been discarded.
1271 pub fn discarded(&self, cx: &mut Context<Self>) {
1272 cx.emit(BufferEvent::Discarded);
1273 cx.notify();
1274 }
1275
1276 /// Reloads the contents of the buffer from disk.
1277 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1278 let (tx, rx) = futures::channel::oneshot::channel();
1279 let prev_version = self.text.version();
1280 self.reload_task = Some(cx.spawn(async move |this, cx| {
1281 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1282 let file = this.file.as_ref()?.as_local()?;
1283
1284 Some((file.disk_state().mtime(), file.load(cx)))
1285 })?
1286 else {
1287 return Ok(());
1288 };
1289
1290 let new_text = new_text.await?;
1291 let diff = this
1292 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1293 .await;
1294 this.update(cx, |this, cx| {
1295 if this.version() == diff.base_version {
1296 this.finalize_last_transaction();
1297 this.apply_diff(diff, cx);
1298 tx.send(this.finalize_last_transaction().cloned()).ok();
1299 this.has_conflict = false;
1300 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1301 } else {
1302 if !diff.edits.is_empty()
1303 || this
1304 .edits_since::<usize>(&diff.base_version)
1305 .next()
1306 .is_some()
1307 {
1308 this.has_conflict = true;
1309 }
1310
1311 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1312 }
1313
1314 this.reload_task.take();
1315 })
1316 }));
1317 rx
1318 }
1319
1320 /// This method is called to signal that the buffer has been reloaded.
1321 pub fn did_reload(
1322 &mut self,
1323 version: clock::Global,
1324 line_ending: LineEnding,
1325 mtime: Option<MTime>,
1326 cx: &mut Context<Self>,
1327 ) {
1328 self.saved_version = version;
1329 self.has_unsaved_edits
1330 .set((self.saved_version.clone(), false));
1331 self.text.set_line_ending(line_ending);
1332 self.saved_mtime = mtime;
1333 cx.emit(BufferEvent::Reloaded);
1334 cx.notify();
1335 }
1336
1337 /// Updates the [`File`] backing this buffer. This should be called when
1338 /// the file has changed or has been deleted.
1339 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1340 let was_dirty = self.is_dirty();
1341 let mut file_changed = false;
1342
1343 if let Some(old_file) = self.file.as_ref() {
1344 if new_file.path() != old_file.path() {
1345 file_changed = true;
1346 }
1347
1348 let old_state = old_file.disk_state();
1349 let new_state = new_file.disk_state();
1350 if old_state != new_state {
1351 file_changed = true;
1352 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1353 cx.emit(BufferEvent::ReloadNeeded)
1354 }
1355 }
1356 } else {
1357 file_changed = true;
1358 };
1359
1360 self.file = Some(new_file);
1361 if file_changed {
1362 self.was_changed();
1363 self.non_text_state_update_count += 1;
1364 if was_dirty != self.is_dirty() {
1365 cx.emit(BufferEvent::DirtyChanged);
1366 }
1367 cx.emit(BufferEvent::FileHandleChanged);
1368 cx.notify();
1369 }
1370 }
1371
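    /// Returns the base buffer of this branch, or `None` if this buffer is not a branch.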
1372 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1373 Some(self.branch_state.as_ref()?.base_buffer.clone())
1374 }
1375
1376 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1377 pub fn language(&self) -> Option<&Arc<Language>> {
1378 self.language.as_ref()
1379 }
1380
1381 /// Returns the [`Language`] at the given location.
1382 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1383 let offset = position.to_offset(self);
1384 let mut is_first = true;
1385 let start_anchor = self.anchor_before(offset);
1386 let end_anchor = self.anchor_after(offset);
1387 self.syntax_map
1388 .lock()
1389 .layers_for_range(offset..offset, &self.text, false)
1390 .filter(|layer| {
1391 if is_first {
1392 is_first = false;
1393 return true;
1394 }
1395
1396 layer
1397 .included_sub_ranges
1398 .map(|sub_ranges| {
1399 sub_ranges.iter().any(|sub_range| {
1400 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1401 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1402 !is_before_start && !is_after_end
1403 })
1404 })
1405 .unwrap_or(true)
1406 })
1407 .last()
1408 .map(|info| info.language.clone())
1409 .or_else(|| self.language.clone())
1410 }
1411
1412 /// Returns each [`Language`] for the active syntax layers at the given location.
1413 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1414 let offset = position.to_offset(self);
1415 let mut languages: Vec<Arc<Language>> = self
1416 .syntax_map
1417 .lock()
1418 .layers_for_range(offset..offset, &self.text, false)
1419 .map(|info| info.language.clone())
1420 .collect();
1421
1422 if languages.is_empty()
1423 && let Some(buffer_language) = self.language()
1424 {
1425 languages.push(buffer_language.clone());
1426 }
1427
1428 languages
1429 }
1430
1431 /// An integer version number that accounts for all updates besides
1432 /// the buffer's text itself (which is versioned via a version vector).
1433 pub fn non_text_state_update_count(&self) -> usize {
1434 self.non_text_state_update_count
1435 }
1436
1437 /// Whether the buffer is being parsed in the background.
1438 #[cfg(any(test, feature = "test-support"))]
1439 pub fn is_parsing(&self) -> bool {
1440 self.reparse.is_some()
1441 }
1442
1443 /// Indicates whether the buffer contains any regions that may be
1444 /// written in a language that hasn't been loaded yet.
1445 pub fn contains_unknown_injections(&self) -> bool {
1446 self.syntax_map.lock().contains_unknown_injections()
1447 }
1448
1449 #[cfg(any(test, feature = "test-support"))]
1450 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1451 self.sync_parse_timeout = timeout;
1452 }
1453
1454 /// Called after an edit to synchronize the buffer's main parse tree with
1455 /// the buffer's new underlying state.
1456 ///
1457 /// Locks the syntax map and interpolates the edits since the last reparse
1458 /// into the foreground syntax tree.
1459 ///
1460 /// Then takes a stable snapshot of the syntax map before unlocking it.
1461 /// The snapshot with the interpolated edits is sent to a background thread,
1462 /// where we ask Tree-sitter to perform an incremental parse.
1463 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. If it finishes within that window, we
    /// proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the newly parsed syntax map.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1477 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1478 if self.reparse.is_some() {
1479 return;
1480 }
1481 let language = if let Some(language) = self.language.clone() {
1482 language
1483 } else {
1484 return;
1485 };
1486
1487 let text = self.text_snapshot();
1488 let parsed_version = self.version();
1489
1490 let mut syntax_map = self.syntax_map.lock();
1491 syntax_map.interpolate(&text);
1492 let language_registry = syntax_map.language_registry();
1493 let mut syntax_snapshot = syntax_map.snapshot();
1494 drop(syntax_map);
1495
1496 let parse_task = cx.background_spawn({
1497 let language = language.clone();
1498 let language_registry = language_registry.clone();
1499 async move {
1500 syntax_snapshot.reparse(&text, language_registry, language);
1501 syntax_snapshot
1502 }
1503 });
1504
1505 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1506 match cx
1507 .background_executor()
1508 .block_with_timeout(self.sync_parse_timeout, parse_task)
1509 {
1510 Ok(new_syntax_snapshot) => {
1511 self.did_finish_parsing(new_syntax_snapshot, cx);
1512 self.reparse = None;
1513 }
1514 Err(parse_task) => {
1515 self.reparse = Some(cx.spawn(async move |this, cx| {
1516 let new_syntax_map = parse_task.await;
1517 this.update(cx, move |this, cx| {
1518 let grammar_changed =
1519 this.language.as_ref().is_none_or(|current_language| {
1520 !Arc::ptr_eq(&language, current_language)
1521 });
1522 let language_registry_changed = new_syntax_map
1523 .contains_unknown_injections()
1524 && language_registry.is_some_and(|registry| {
1525 registry.version() != new_syntax_map.language_registry_version()
1526 });
1527 let parse_again = language_registry_changed
1528 || grammar_changed
1529 || this.version.changed_since(&parsed_version);
1530 this.did_finish_parsing(new_syntax_map, cx);
1531 this.reparse = None;
1532 if parse_again {
1533 this.reparse(cx);
1534 }
1535 })
1536 .ok();
1537 }));
1538 }
1539 }
1540 }
1541
1542 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1543 self.was_changed();
1544 self.non_text_state_update_count += 1;
1545 self.syntax_map.lock().did_parse(syntax_snapshot);
1546 self.request_autoindent(cx);
1547 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1548 cx.emit(BufferEvent::Reparsed);
1549 cx.notify();
1550 }
1551
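    /// Returns a receiver that reports whether the buffer is currently being parsed.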
1552 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1553 self.parse_status.1.clone()
1554 }
1555
1556 /// Assign to the buffer a set of diagnostics created by a given language server.
1557 pub fn update_diagnostics(
1558 &mut self,
1559 server_id: LanguageServerId,
1560 diagnostics: DiagnosticSet,
1561 cx: &mut Context<Self>,
1562 ) {
1563 let lamport_timestamp = self.text.lamport_clock.tick();
1564 let op = Operation::UpdateDiagnostics {
1565 server_id,
1566 diagnostics: diagnostics.iter().cloned().collect(),
1567 lamport_timestamp,
1568 };
1569
1570 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1571 self.send_operation(op, true, cx);
1572 }
1573
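    /// Returns the diagnostics stored for this buffer, either those produced by a
    /// single language server or, when `for_server` is `None`, by all servers.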
1574 pub fn buffer_diagnostics(
1575 &self,
1576 for_server: Option<LanguageServerId>,
1577 ) -> Vec<&DiagnosticEntry<Anchor>> {
1578 match for_server {
1579 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1580 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1581 Err(_) => Vec::new(),
1582 },
1583 None => self
1584 .diagnostics
1585 .iter()
1586 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1587 .collect(),
1588 }
1589 }
1590
1591 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1592 if let Some(indent_sizes) = self.compute_autoindents() {
1593 let indent_sizes = cx.background_spawn(indent_sizes);
1594 match cx
1595 .background_executor()
1596 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1597 {
1598 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1599 Err(indent_sizes) => {
1600 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1601 let indent_sizes = indent_sizes.await;
1602 this.update(cx, |this, cx| {
1603 this.apply_autoindents(indent_sizes, cx);
1604 })
1605 .ok();
1606 }));
1607 }
1608 }
1609 } else {
1610 self.autoindent_requests.clear();
1611 for tx in self.wait_for_autoindent_txs.drain(..) {
1612 tx.send(()).ok();
1613 }
1614 }
1615 }
1616
1617 fn compute_autoindents(
1618 &self,
1619 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1620 let max_rows_between_yields = 100;
1621 let snapshot = self.snapshot();
1622 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1623 return None;
1624 }
1625
1626 let autoindent_requests = self.autoindent_requests.clone();
1627 Some(async move {
1628 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1629 for request in autoindent_requests {
1630 // Resolve each edited range to its row in the current buffer and in the
1631 // buffer before this batch of edits.
1632 let mut row_ranges = Vec::new();
1633 let mut old_to_new_rows = BTreeMap::new();
1634 let mut language_indent_sizes_by_new_row = Vec::new();
1635 for entry in &request.entries {
1636 let position = entry.range.start;
1637 let new_row = position.to_point(&snapshot).row;
1638 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1639 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1640
1641 if !entry.first_line_is_new {
1642 let old_row = position.to_point(&request.before_edit).row;
1643 old_to_new_rows.insert(old_row, new_row);
1644 }
1645 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1646 }
1647
1648 // Build a map containing the suggested indentation for each of the edited lines
1649 // with respect to the state of the buffer before these edits. This map is keyed
1650 // by the rows for these lines in the current state of the buffer.
1651 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1652 let old_edited_ranges =
1653 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1654 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1655 let mut language_indent_size = IndentSize::default();
1656 for old_edited_range in old_edited_ranges {
1657 let suggestions = request
1658 .before_edit
1659 .suggest_autoindents(old_edited_range.clone())
1660 .into_iter()
1661 .flatten();
1662 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1663 if let Some(suggestion) = suggestion {
1664 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1665
1666 // Find the indent size based on the language for this row.
1667 while let Some((row, size)) = language_indent_sizes.peek() {
1668 if *row > new_row {
1669 break;
1670 }
1671 language_indent_size = *size;
1672 language_indent_sizes.next();
1673 }
1674
1675 let suggested_indent = old_to_new_rows
1676 .get(&suggestion.basis_row)
1677 .and_then(|from_row| {
1678 Some(old_suggestions.get(from_row).copied()?.0)
1679 })
1680 .unwrap_or_else(|| {
1681 request
1682 .before_edit
1683 .indent_size_for_line(suggestion.basis_row)
1684 })
1685 .with_delta(suggestion.delta, language_indent_size);
1686 old_suggestions
1687 .insert(new_row, (suggested_indent, suggestion.within_error));
1688 }
1689 }
1690 yield_now().await;
1691 }
1692
1693 // Compute new suggestions for each line, but only include them in the result
1694 // if they differ from the old suggestion for that line.
1695 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1696 let mut language_indent_size = IndentSize::default();
1697 for (row_range, original_indent_column) in row_ranges {
1698 let new_edited_row_range = if request.is_block_mode {
1699 row_range.start..row_range.start + 1
1700 } else {
1701 row_range.clone()
1702 };
1703
1704 let suggestions = snapshot
1705 .suggest_autoindents(new_edited_row_range.clone())
1706 .into_iter()
1707 .flatten();
1708 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1709 if let Some(suggestion) = suggestion {
1710 // Find the indent size based on the language for this row.
1711 while let Some((row, size)) = language_indent_sizes.peek() {
1712 if *row > new_row {
1713 break;
1714 }
1715 language_indent_size = *size;
1716 language_indent_sizes.next();
1717 }
1718
1719 let suggested_indent = indent_sizes
1720 .get(&suggestion.basis_row)
1721 .copied()
1722 .map(|e| e.0)
1723 .unwrap_or_else(|| {
1724 snapshot.indent_size_for_line(suggestion.basis_row)
1725 })
1726 .with_delta(suggestion.delta, language_indent_size);
1727
1728 if old_suggestions.get(&new_row).is_none_or(
1729 |(old_indentation, was_within_error)| {
1730 suggested_indent != *old_indentation
1731 && (!suggestion.within_error || *was_within_error)
1732 },
1733 ) {
1734 indent_sizes.insert(
1735 new_row,
1736 (suggested_indent, request.ignore_empty_lines),
1737 );
1738 }
1739 }
1740 }
1741
1742 if let (true, Some(original_indent_column)) =
1743 (request.is_block_mode, original_indent_column)
1744 {
1745 let new_indent =
1746 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1747 *indent
1748 } else {
1749 snapshot.indent_size_for_line(row_range.start)
1750 };
1751 let delta = new_indent.len as i64 - original_indent_column as i64;
1752 if delta != 0 {
1753 for row in row_range.skip(1) {
1754 indent_sizes.entry(row).or_insert_with(|| {
1755 let mut size = snapshot.indent_size_for_line(row);
1756 if size.kind == new_indent.kind {
1757 match delta.cmp(&0) {
1758 Ordering::Greater => size.len += delta as u32,
1759 Ordering::Less => {
1760 size.len = size.len.saturating_sub(-delta as u32)
1761 }
1762 Ordering::Equal => {}
1763 }
1764 }
1765 (size, request.ignore_empty_lines)
1766 });
1767 }
1768 }
1769 }
1770
1771 yield_now().await;
1772 }
1773 }
1774
1775 indent_sizes
1776 .into_iter()
1777 .filter_map(|(row, (indent, ignore_empty_lines))| {
1778 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1779 None
1780 } else {
1781 Some((row, indent))
1782 }
1783 })
1784 .collect()
1785 })
1786 }
1787
1788 fn apply_autoindents(
1789 &mut self,
1790 indent_sizes: BTreeMap<u32, IndentSize>,
1791 cx: &mut Context<Self>,
1792 ) {
1793 self.autoindent_requests.clear();
1794 for tx in self.wait_for_autoindent_txs.drain(..) {
1795 tx.send(()).ok();
1796 }
1797
1798 let edits: Vec<_> = indent_sizes
1799 .into_iter()
1800 .filter_map(|(row, indent_size)| {
1801 let current_size = indent_size_for_line(self, row);
1802 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1803 })
1804 .collect();
1805
1806 let preserve_preview = self.preserve_preview();
1807 self.edit(edits, None, cx);
1808 if preserve_preview {
1809 self.refresh_preview();
1810 }
1811 }
1812
1813 /// Create a minimal edit that will cause the given row to be indented
1814 /// with the given size. After applying this edit, the length of the line
1815 /// will always be at least `new_size.len`.
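///
/// An illustrative sketch (not part of the original docs; it assumes
/// `IndentSize::spaces(n)` produces a space-kind indent of length `n`):
/// growing a two-space indent to four spaces on row 3 yields an insertion of
/// two spaces at the start of that row.
///
/// ```ignore
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
/// ```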
1816 pub fn edit_for_indent_size_adjustment(
1817 row: u32,
1818 current_size: IndentSize,
1819 new_size: IndentSize,
1820 ) -> Option<(Range<Point>, String)> {
1821 if new_size.kind == current_size.kind {
1822 match new_size.len.cmp(&current_size.len) {
1823 Ordering::Greater => {
1824 let point = Point::new(row, 0);
1825 Some((
1826 point..point,
1827 iter::repeat(new_size.char())
1828 .take((new_size.len - current_size.len) as usize)
1829 .collect::<String>(),
1830 ))
1831 }
1832
1833 Ordering::Less => Some((
1834 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1835 String::new(),
1836 )),
1837
1838 Ordering::Equal => None,
1839 }
1840 } else {
1841 Some((
1842 Point::new(row, 0)..Point::new(row, current_size.len),
1843 iter::repeat(new_size.char())
1844 .take(new_size.len as usize)
1845 .collect::<String>(),
1846 ))
1847 }
1848 }
1849
1850 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1851 /// and the given new text.
1852 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1853 let old_text = self.as_rope().clone();
1854 let base_version = self.version();
1855 cx.background_executor()
1856 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1857 let old_text = old_text.to_string();
1858 let line_ending = LineEnding::detect(&new_text);
1859 LineEnding::normalize(&mut new_text);
1860 let edits = text_diff(&old_text, &new_text);
1861 Diff {
1862 base_version,
1863 line_ending,
1864 edits,
1865 }
1866 })
1867 }
1868
1869 /// Spawns a background task that searches the buffer for any whitespace
1870 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1871 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1872 let old_text = self.as_rope().clone();
1873 let line_ending = self.line_ending();
1874 let base_version = self.version();
1875 cx.background_spawn(async move {
1876 let ranges = trailing_whitespace_ranges(&old_text);
1877 let empty = Arc::<str>::from("");
1878 Diff {
1879 base_version,
1880 line_ending,
1881 edits: ranges
1882 .into_iter()
1883 .map(|range| (range, empty.clone()))
1884 .collect(),
1885 }
1886 })
1887 }
1888
1889 /// Ensures that the buffer ends with a single newline character, and
1890 /// no other whitespace. Skips if the buffer is empty.
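///
/// For example (illustrative, following the logic below), a buffer containing
/// `"fn main() {}  \n\n"` ends up containing `"fn main() {}\n"`.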
1891 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1892 let len = self.len();
1893 if len == 0 {
1894 return;
1895 }
1896 let mut offset = len;
1897 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1898 let non_whitespace_len = chunk
1899 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1900 .len();
1901 offset -= chunk.len();
1902 offset += non_whitespace_len;
1903 if non_whitespace_len != 0 {
1904 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1905 return;
1906 }
1907 break;
1908 }
1909 }
1910 self.edit([(offset..len, "\n")], None, cx);
1911 }
1912
1913 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1914 /// calculated, then adjust the diff to account for those changes, and discard any
1915 /// parts of the diff that conflict with those changes.
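///
/// A hedged usage sketch pairing this with [`Buffer::diff`] (the buffer entity,
/// the replacement text, and the async plumbing are assumed to exist in the
/// caller):
///
/// ```ignore
/// let diff_task = buffer.read(cx).diff(new_text, cx);
/// // ...on a foreground task, await the computed diff and apply it:
/// let diff = diff_task.await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// });
/// ```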
1916 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1917 let snapshot = self.snapshot();
1918 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1919 let mut delta = 0;
1920 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1921 while let Some(edit_since) = edits_since.peek() {
1922 // If the edit occurs after a diff hunk, then it does not
1923 // affect that hunk.
1924 if edit_since.old.start > range.end {
1925 break;
1926 }
1927 // If the edit precedes the diff hunk, then adjust the hunk
1928 // to reflect the edit.
1929 else if edit_since.old.end < range.start {
1930 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1931 edits_since.next();
1932 }
1933 // If the edit intersects a diff hunk, then discard that hunk.
1934 else {
1935 return None;
1936 }
1937 }
1938
1939 let start = (range.start as i64 + delta) as usize;
1940 let end = (range.end as i64 + delta) as usize;
1941 Some((start..end, new_text))
1942 });
1943
1944 self.start_transaction();
1945 self.text.set_line_ending(diff.line_ending);
1946 self.edit(adjusted_edits, None, cx);
1947 self.end_transaction(cx)
1948 }
1949
1950 fn has_unsaved_edits(&self) -> bool {
1951 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1952
1953 if last_version == self.version {
1954 self.has_unsaved_edits
1955 .set((last_version, has_unsaved_edits));
1956 return has_unsaved_edits;
1957 }
1958
1959 let has_edits = self.has_edits_since(&self.saved_version);
1960 self.has_unsaved_edits
1961 .set((self.version.clone(), has_edits));
1962 has_edits
1963 }
1964
1965 /// Checks if the buffer has unsaved changes.
1966 pub fn is_dirty(&self) -> bool {
1967 if self.capability == Capability::ReadOnly {
1968 return false;
1969 }
1970 if self.has_conflict {
1971 return true;
1972 }
1973 match self.file.as_ref().map(|f| f.disk_state()) {
1974 Some(DiskState::New) | Some(DiskState::Deleted) => {
1975 !self.is_empty() && self.has_unsaved_edits()
1976 }
1977 _ => self.has_unsaved_edits(),
1978 }
1979 }
1980
1981 /// Checks if the buffer and its file have both changed since the buffer
1982 /// was last saved or reloaded.
1983 pub fn has_conflict(&self) -> bool {
1984 if self.has_conflict {
1985 return true;
1986 }
1987 let Some(file) = self.file.as_ref() else {
1988 return false;
1989 };
1990 match file.disk_state() {
1991 DiskState::New => false,
1992 DiskState::Present { mtime } => match self.saved_mtime {
1993 Some(saved_mtime) => {
1994 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1995 }
1996 None => true,
1997 },
1998 DiskState::Deleted => false,
1999 }
2000 }
2001
2002 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2003 pub fn subscribe(&mut self) -> Subscription {
2004 self.text.subscribe()
2005 }
2006
2007 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2008 ///
2009 /// This allows downstream code to check if the buffer's text has changed without
2010 /// waiting for an effect cycle, which would be required if using events.
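///
/// A minimal sketch (the caller owns the `Rc<Cell<bool>>` and keeps it alive):
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
/// // ...later, after edits may have happened, check and reset the bit:
/// if changed.take() {
///     // React to the text change.
/// }
/// ```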
2011 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2012 if let Err(ix) = self
2013 .change_bits
2014 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2015 {
2016 self.change_bits.insert(ix, bit);
2017 }
2018 }
2019
2020 fn was_changed(&mut self) {
2021 self.change_bits.retain(|change_bit| {
2022 change_bit.upgrade().is_some_and(|bit| {
2023 bit.replace(true);
2024 true
2025 })
2026 });
2027 }
2028
2029 /// Starts a transaction, if one is not already in-progress. When undoing or
2030 /// redoing edits, all of the edits performed within a transaction are undone
2031 /// or redone together.
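///
/// A hedged sketch of grouping two edits into a single undo step (the
/// surrounding entity and context are assumed):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "a")], None, cx);
/// buffer.edit([(1..1, "b")], None, cx);
/// buffer.end_transaction(cx); // undoing now reverts both edits at once
/// ```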
2032 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2033 self.start_transaction_at(Instant::now())
2034 }
2035
2036 /// Starts a transaction, providing the current time. Subsequent transactions
2037 /// that occur within a short period of time will be grouped together. This
2038 /// is controlled by the buffer's undo grouping duration.
2039 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2040 self.transaction_depth += 1;
2041 if self.was_dirty_before_starting_transaction.is_none() {
2042 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2043 }
2044 self.text.start_transaction_at(now)
2045 }
2046
2047 /// Terminates the current transaction, if this is the outermost transaction.
2048 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2049 self.end_transaction_at(Instant::now(), cx)
2050 }
2051
2052 /// Terminates the current transaction, providing the current time. Subsequent transactions
2053 /// that occur within a short period of time will be grouped together. This
2054 /// is controlled by the buffer's undo grouping duration.
2055 pub fn end_transaction_at(
2056 &mut self,
2057 now: Instant,
2058 cx: &mut Context<Self>,
2059 ) -> Option<TransactionId> {
2060 assert!(self.transaction_depth > 0);
2061 self.transaction_depth -= 1;
2062 let was_dirty = if self.transaction_depth == 0 {
2063 self.was_dirty_before_starting_transaction.take().unwrap()
2064 } else {
2065 false
2066 };
2067 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2068 self.did_edit(&start_version, was_dirty, cx);
2069 Some(transaction_id)
2070 } else {
2071 None
2072 }
2073 }
2074
2075 /// Manually add a transaction to the buffer's undo history.
2076 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2077 self.text.push_transaction(transaction, now);
2078 }
2079
2080 /// Differs from `push_transaction` in that it does not clear the redo
2081 /// stack. Intended to be used to create a parent transaction to merge
2082 /// potential child transactions into.
2083 ///
2084 /// The caller is responsible for removing it from the undo history using
2085 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2086 /// are merged into this transaction, the caller is responsible for ensuring
2087 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2088 /// cleared is to create transactions with the usual `start_transaction` and
2089 /// `end_transaction` methods and merge the resulting transactions into
2090 /// the transaction created by this method.
2091 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2092 self.text.push_empty_transaction(now)
2093 }
2094
2095 /// Prevent the last transaction from being grouped with any subsequent transactions,
2096 /// even if they occur within the buffer's undo grouping duration.
2097 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2098 self.text.finalize_last_transaction()
2099 }
2100
2101 /// Manually group all changes since a given transaction.
2102 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2103 self.text.group_until_transaction(transaction_id);
2104 }
2105
2106 /// Manually remove a transaction from the buffer's undo history
2107 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2108 self.text.forget_transaction(transaction_id)
2109 }
2110
2111 /// Retrieve a transaction from the buffer's undo history
2112 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2113 self.text.get_transaction(transaction_id)
2114 }
2115
2116 /// Manually merge two transactions in the buffer's undo history.
2117 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2118 self.text.merge_transactions(transaction, destination);
2119 }
2120
2121 /// Waits for the buffer to receive operations with the given timestamps.
2122 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2123 &mut self,
2124 edit_ids: It,
2125 ) -> impl Future<Output = Result<()>> + use<It> {
2126 self.text.wait_for_edits(edit_ids)
2127 }
2128
2129 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2130 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2131 &mut self,
2132 anchors: It,
2133 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2134 self.text.wait_for_anchors(anchors)
2135 }
2136
2137 /// Waits for the buffer to receive operations up to the given version.
2138 pub fn wait_for_version(
2139 &mut self,
2140 version: clock::Global,
2141 ) -> impl Future<Output = Result<()>> + use<> {
2142 self.text.wait_for_version(version)
2143 }
2144
2145 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2146 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2147 pub fn give_up_waiting(&mut self) {
2148 self.text.give_up_waiting();
2149 }
2150
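/// Returns a receiver that resolves once the currently pending autoindent
/// requests have been applied, or `None` if no autoindent is pending.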
2151 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2152 let mut rx = None;
2153 if !self.autoindent_requests.is_empty() {
2154 let channel = oneshot::channel();
2155 self.wait_for_autoindent_txs.push(channel.0);
2156 rx = Some(channel.1);
2157 }
2158 rx
2159 }
2160
2161 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2162 pub fn set_active_selections(
2163 &mut self,
2164 selections: Arc<[Selection<Anchor>]>,
2165 line_mode: bool,
2166 cursor_shape: CursorShape,
2167 cx: &mut Context<Self>,
2168 ) {
2169 let lamport_timestamp = self.text.lamport_clock.tick();
2170 self.remote_selections.insert(
2171 self.text.replica_id(),
2172 SelectionSet {
2173 selections: selections.clone(),
2174 lamport_timestamp,
2175 line_mode,
2176 cursor_shape,
2177 },
2178 );
2179 self.send_operation(
2180 Operation::UpdateSelections {
2181 selections,
2182 line_mode,
2183 lamport_timestamp,
2184 cursor_shape,
2185 },
2186 true,
2187 cx,
2188 );
2189 self.non_text_state_update_count += 1;
2190 cx.notify();
2191 }
2192
2193 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2194 /// this replica.
2195 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2196 if self
2197 .remote_selections
2198 .get(&self.text.replica_id())
2199 .is_none_or(|set| !set.selections.is_empty())
2200 {
2201 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2202 }
2203 }
2204
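/// Stores a set of selections attributed to the agent, tracked separately from
/// this replica's own selections under [`AGENT_REPLICA_ID`].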
2205 pub fn set_agent_selections(
2206 &mut self,
2207 selections: Arc<[Selection<Anchor>]>,
2208 line_mode: bool,
2209 cursor_shape: CursorShape,
2210 cx: &mut Context<Self>,
2211 ) {
2212 let lamport_timestamp = self.text.lamport_clock.tick();
2213 self.remote_selections.insert(
2214 AGENT_REPLICA_ID,
2215 SelectionSet {
2216 selections,
2217 lamport_timestamp,
2218 line_mode,
2219 cursor_shape,
2220 },
2221 );
2222 self.non_text_state_update_count += 1;
2223 cx.notify();
2224 }
2225
2226 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2227 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2228 }
2229
2230 /// Replaces the buffer's entire text.
2231 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2232 where
2233 T: Into<Arc<str>>,
2234 {
2235 self.autoindent_requests.clear();
2236 self.edit([(0..self.len(), text)], None, cx)
2237 }
2238
2239 /// Appends the given text to the end of the buffer.
2240 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2241 where
2242 T: Into<Arc<str>>,
2243 {
2244 self.edit([(self.len()..self.len(), text)], None, cx)
2245 }
2246
2247 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2248 /// delete, and a string of text to insert at that location.
2249 ///
2250 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2251 /// request for the edited ranges, which will be processed when the buffer finishes
2252 /// parsing.
2253 ///
2254 /// Parsing takes place at the end of a transaction, and may compute synchronously
2255 /// or asynchronously, depending on the changes.
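///
/// A minimal sketch (the surrounding entity and context are assumed):
///
/// ```ignore
/// buffer.update(cx, |buffer, cx| {
///     // Replace the first three bytes and insert a newline at offset 10,
///     // re-indenting every edited line.
///     buffer.edit(
///         [(0..3, "let"), (10..10, "\n")],
///         Some(AutoindentMode::EachLine),
///         cx,
///     );
/// });
/// ```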
2256 pub fn edit<I, S, T>(
2257 &mut self,
2258 edits_iter: I,
2259 autoindent_mode: Option<AutoindentMode>,
2260 cx: &mut Context<Self>,
2261 ) -> Option<clock::Lamport>
2262 where
2263 I: IntoIterator<Item = (Range<S>, T)>,
2264 S: ToOffset,
2265 T: Into<Arc<str>>,
2266 {
2267 // Skip invalid edits and coalesce contiguous ones.
2268 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2269
2270 for (range, new_text) in edits_iter {
2271 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2272
2273 if range.start > range.end {
2274 mem::swap(&mut range.start, &mut range.end);
2275 }
2276 let new_text = new_text.into();
2277 if !new_text.is_empty() || !range.is_empty() {
2278 if let Some((prev_range, prev_text)) = edits.last_mut()
2279 && prev_range.end >= range.start
2280 {
2281 prev_range.end = cmp::max(prev_range.end, range.end);
2282 *prev_text = format!("{prev_text}{new_text}").into();
2283 } else {
2284 edits.push((range, new_text));
2285 }
2286 }
2287 }
2288 if edits.is_empty() {
2289 return None;
2290 }
2291
2292 self.start_transaction();
2293 self.pending_autoindent.take();
2294 let autoindent_request = autoindent_mode
2295 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2296
2297 let edit_operation = self.text.edit(edits.iter().cloned());
2298 let edit_id = edit_operation.timestamp();
2299
2300 if let Some((before_edit, mode)) = autoindent_request {
2301 let mut delta = 0isize;
2302 let mut previous_setting = None;
2303 let entries: Vec<_> = edits
2304 .into_iter()
2305 .enumerate()
2306 .zip(&edit_operation.as_edit().unwrap().new_text)
2307 .filter(|((_, (range, _)), _)| {
2308 let language = before_edit.language_at(range.start);
2309 let language_id = language.map(|l| l.id());
2310 if let Some((cached_language_id, auto_indent)) = previous_setting
2311 && cached_language_id == language_id
2312 {
2313 auto_indent
2314 } else {
2315 // The auto-indent setting is not present in editorconfigs, hence
2316 // we can avoid passing the file here.
2317 let auto_indent =
2318 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2319 previous_setting = Some((language_id, auto_indent));
2320 auto_indent
2321 }
2322 })
2323 .map(|((ix, (range, _)), new_text)| {
2324 let new_text_length = new_text.len();
2325 let old_start = range.start.to_point(&before_edit);
2326 let new_start = (delta + range.start as isize) as usize;
2327 let range_len = range.end - range.start;
2328 delta += new_text_length as isize - range_len as isize;
2329
2330 // Decide what range of the insertion to auto-indent, and whether
2331 // the first line of the insertion should be considered a newly-inserted line
2332 // or an edit to an existing line.
2333 let mut range_of_insertion_to_indent = 0..new_text_length;
2334 let mut first_line_is_new = true;
2335
2336 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2337 let old_line_end = before_edit.line_len(old_start.row);
2338
2339 if old_start.column > old_line_start {
2340 first_line_is_new = false;
2341 }
2342
2343 if !new_text.contains('\n')
2344 && (old_start.column + (range_len as u32) < old_line_end
2345 || old_line_end == old_line_start)
2346 {
2347 first_line_is_new = false;
2348 }
2349
2350 // When inserting text starting with a newline, avoid auto-indenting the
2351 // previous line.
2352 if new_text.starts_with('\n') {
2353 range_of_insertion_to_indent.start += 1;
2354 first_line_is_new = true;
2355 }
2356
2357 let mut original_indent_column = None;
2358 if let AutoindentMode::Block {
2359 original_indent_columns,
2360 } = &mode
2361 {
2362 original_indent_column = Some(if new_text.starts_with('\n') {
2363 indent_size_for_text(
2364 new_text[range_of_insertion_to_indent.clone()].chars(),
2365 )
2366 .len
2367 } else {
2368 original_indent_columns
2369 .get(ix)
2370 .copied()
2371 .flatten()
2372 .unwrap_or_else(|| {
2373 indent_size_for_text(
2374 new_text[range_of_insertion_to_indent.clone()].chars(),
2375 )
2376 .len
2377 })
2378 });
2379
2380 // Avoid auto-indenting the line after the edit.
2381 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2382 range_of_insertion_to_indent.end -= 1;
2383 }
2384 }
2385
2386 AutoindentRequestEntry {
2387 first_line_is_new,
2388 original_indent_column,
2389 indent_size: before_edit.language_indent_size_at(range.start, cx),
2390 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2391 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2392 }
2393 })
2394 .collect();
2395
2396 if !entries.is_empty() {
2397 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2398 before_edit,
2399 entries,
2400 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2401 ignore_empty_lines: false,
2402 }));
2403 }
2404 }
2405
2406 self.end_transaction(cx);
2407 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2408 Some(edit_id)
2409 }
2410
2411 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2412 self.was_changed();
2413
2414 if self.edits_since::<usize>(old_version).next().is_none() {
2415 return;
2416 }
2417
2418 self.reparse(cx);
2419 cx.emit(BufferEvent::Edited);
2420 if was_dirty != self.is_dirty() {
2421 cx.emit(BufferEvent::DirtyChanged);
2422 }
2423 cx.notify();
2424 }
2425
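/// Requests an auto-indent of the given ranges, treating their first lines as
/// newly inserted and skipping empty lines when the computed indentation is applied.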
2426 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2427 where
2428 I: IntoIterator<Item = Range<T>>,
2429 T: ToOffset + Copy,
2430 {
2431 let before_edit = self.snapshot();
2432 let entries = ranges
2433 .into_iter()
2434 .map(|range| AutoindentRequestEntry {
2435 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2436 first_line_is_new: true,
2437 indent_size: before_edit.language_indent_size_at(range.start, cx),
2438 original_indent_column: None,
2439 })
2440 .collect();
2441 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2442 before_edit,
2443 entries,
2444 is_block_mode: false,
2445 ignore_empty_lines: true,
2446 }));
2447 self.request_autoindent(cx);
2448 }
2449
2450 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2451 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2452 pub fn insert_empty_line(
2453 &mut self,
2454 position: impl ToPoint,
2455 space_above: bool,
2456 space_below: bool,
2457 cx: &mut Context<Self>,
2458 ) -> Point {
2459 let mut position = position.to_point(self);
2460
2461 self.start_transaction();
2462
2463 self.edit(
2464 [(position..position, "\n")],
2465 Some(AutoindentMode::EachLine),
2466 cx,
2467 );
2468
2469 if position.column > 0 {
2470 position += Point::new(1, 0);
2471 }
2472
2473 if !self.is_line_blank(position.row) {
2474 self.edit(
2475 [(position..position, "\n")],
2476 Some(AutoindentMode::EachLine),
2477 cx,
2478 );
2479 }
2480
2481 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2482 self.edit(
2483 [(position..position, "\n")],
2484 Some(AutoindentMode::EachLine),
2485 cx,
2486 );
2487 position.row += 1;
2488 }
2489
2490 if space_below
2491 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2492 {
2493 self.edit(
2494 [(position..position, "\n")],
2495 Some(AutoindentMode::EachLine),
2496 cx,
2497 );
2498 }
2499
2500 self.end_transaction(cx);
2501
2502 position
2503 }
2504
2505 /// Applies the given remote operations to the buffer.
2506 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2507 self.pending_autoindent.take();
2508 let was_dirty = self.is_dirty();
2509 let old_version = self.version.clone();
2510 let mut deferred_ops = Vec::new();
2511 let buffer_ops = ops
2512 .into_iter()
2513 .filter_map(|op| match op {
2514 Operation::Buffer(op) => Some(op),
2515 _ => {
2516 if self.can_apply_op(&op) {
2517 self.apply_op(op, cx);
2518 } else {
2519 deferred_ops.push(op);
2520 }
2521 None
2522 }
2523 })
2524 .collect::<Vec<_>>();
2525 for operation in buffer_ops.iter() {
2526 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2527 }
2528 self.text.apply_ops(buffer_ops);
2529 self.deferred_ops.insert(deferred_ops);
2530 self.flush_deferred_ops(cx);
2531 self.did_edit(&old_version, was_dirty, cx);
2532 // Notify independently of whether the buffer was edited, as the operations could include a
2533 // selection update.
2534 cx.notify();
2535 }
2536
2537 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2538 let mut deferred_ops = Vec::new();
2539 for op in self.deferred_ops.drain().iter().cloned() {
2540 if self.can_apply_op(&op) {
2541 self.apply_op(op, cx);
2542 } else {
2543 deferred_ops.push(op);
2544 }
2545 }
2546 self.deferred_ops.insert(deferred_ops);
2547 }
2548
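/// Returns `true` if there are remote operations that are still waiting to be
/// applied, either at this layer or in the underlying text buffer.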
2549 pub fn has_deferred_ops(&self) -> bool {
2550 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2551 }
2552
2553 fn can_apply_op(&self, operation: &Operation) -> bool {
2554 match operation {
2555 Operation::Buffer(_) => {
2556 unreachable!("buffer operations should never be applied at this layer")
2557 }
2558 Operation::UpdateDiagnostics {
2559 diagnostics: diagnostic_set,
2560 ..
2561 } => diagnostic_set.iter().all(|diagnostic| {
2562 self.text.can_resolve(&diagnostic.range.start)
2563 && self.text.can_resolve(&diagnostic.range.end)
2564 }),
2565 Operation::UpdateSelections { selections, .. } => selections
2566 .iter()
2567 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2568 Operation::UpdateCompletionTriggers { .. } => true,
2569 }
2570 }
2571
2572 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2573 match operation {
2574 Operation::Buffer(_) => {
2575 unreachable!("buffer operations should never be applied at this layer")
2576 }
2577 Operation::UpdateDiagnostics {
2578 server_id,
2579 diagnostics: diagnostic_set,
2580 lamport_timestamp,
2581 } => {
2582 let snapshot = self.snapshot();
2583 self.apply_diagnostic_update(
2584 server_id,
2585 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2586 lamport_timestamp,
2587 cx,
2588 );
2589 }
2590 Operation::UpdateSelections {
2591 selections,
2592 lamport_timestamp,
2593 line_mode,
2594 cursor_shape,
2595 } => {
2596 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2597 && set.lamport_timestamp > lamport_timestamp
2598 {
2599 return;
2600 }
2601
2602 self.remote_selections.insert(
2603 lamport_timestamp.replica_id,
2604 SelectionSet {
2605 selections,
2606 lamport_timestamp,
2607 line_mode,
2608 cursor_shape,
2609 },
2610 );
2611 self.text.lamport_clock.observe(lamport_timestamp);
2612 self.non_text_state_update_count += 1;
2613 }
2614 Operation::UpdateCompletionTriggers {
2615 triggers,
2616 lamport_timestamp,
2617 server_id,
2618 } => {
2619 if triggers.is_empty() {
2620 self.completion_triggers_per_language_server
2621 .remove(&server_id);
2622 self.completion_triggers = self
2623 .completion_triggers_per_language_server
2624 .values()
2625 .flat_map(|triggers| triggers.iter().cloned())
2626 .collect();
2627 } else {
2628 self.completion_triggers_per_language_server
2629 .insert(server_id, triggers.iter().cloned().collect());
2630 self.completion_triggers.extend(triggers);
2631 }
2632 self.text.lamport_clock.observe(lamport_timestamp);
2633 }
2634 }
2635 }
2636
2637 fn apply_diagnostic_update(
2638 &mut self,
2639 server_id: LanguageServerId,
2640 diagnostics: DiagnosticSet,
2641 lamport_timestamp: clock::Lamport,
2642 cx: &mut Context<Self>,
2643 ) {
2644 if lamport_timestamp > self.diagnostics_timestamp {
2645 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2646 if diagnostics.is_empty() {
2647 if let Ok(ix) = ix {
2648 self.diagnostics.remove(ix);
2649 }
2650 } else {
2651 match ix {
2652 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2653 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2654 };
2655 }
2656 self.diagnostics_timestamp = lamport_timestamp;
2657 self.non_text_state_update_count += 1;
2658 self.text.lamport_clock.observe(lamport_timestamp);
2659 cx.notify();
2660 cx.emit(BufferEvent::DiagnosticsUpdated);
2661 }
2662 }
2663
2664 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2665 self.was_changed();
2666 cx.emit(BufferEvent::Operation {
2667 operation,
2668 is_local,
2669 });
2670 }
2671
2672 /// Removes the selections for a given peer.
2673 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2674 self.remote_selections.remove(&replica_id);
2675 cx.notify();
2676 }
2677
2678 /// Undoes the most recent transaction.
2679 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2680 let was_dirty = self.is_dirty();
2681 let old_version = self.version.clone();
2682
2683 if let Some((transaction_id, operation)) = self.text.undo() {
2684 self.send_operation(Operation::Buffer(operation), true, cx);
2685 self.did_edit(&old_version, was_dirty, cx);
2686 Some(transaction_id)
2687 } else {
2688 None
2689 }
2690 }
2691
2692 /// Manually undoes a specific transaction in the buffer's undo history.
2693 pub fn undo_transaction(
2694 &mut self,
2695 transaction_id: TransactionId,
2696 cx: &mut Context<Self>,
2697 ) -> bool {
2698 let was_dirty = self.is_dirty();
2699 let old_version = self.version.clone();
2700 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2701 self.send_operation(Operation::Buffer(operation), true, cx);
2702 self.did_edit(&old_version, was_dirty, cx);
2703 true
2704 } else {
2705 false
2706 }
2707 }
2708
2709 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2710 pub fn undo_to_transaction(
2711 &mut self,
2712 transaction_id: TransactionId,
2713 cx: &mut Context<Self>,
2714 ) -> bool {
2715 let was_dirty = self.is_dirty();
2716 let old_version = self.version.clone();
2717
2718 let operations = self.text.undo_to_transaction(transaction_id);
2719 let undone = !operations.is_empty();
2720 for operation in operations {
2721 self.send_operation(Operation::Buffer(operation), true, cx);
2722 }
2723 if undone {
2724 self.did_edit(&old_version, was_dirty, cx)
2725 }
2726 undone
2727 }
2728
2729 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2730 let was_dirty = self.is_dirty();
2731 let operation = self.text.undo_operations(counts);
2732 let old_version = self.version.clone();
2733 self.send_operation(Operation::Buffer(operation), true, cx);
2734 self.did_edit(&old_version, was_dirty, cx);
2735 }
2736
2737 /// Redoes the most recent transaction.
2738 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2739 let was_dirty = self.is_dirty();
2740 let old_version = self.version.clone();
2741
2742 if let Some((transaction_id, operation)) = self.text.redo() {
2743 self.send_operation(Operation::Buffer(operation), true, cx);
2744 self.did_edit(&old_version, was_dirty, cx);
2745 Some(transaction_id)
2746 } else {
2747 None
2748 }
2749 }
2750
2751 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2752 pub fn redo_to_transaction(
2753 &mut self,
2754 transaction_id: TransactionId,
2755 cx: &mut Context<Self>,
2756 ) -> bool {
2757 let was_dirty = self.is_dirty();
2758 let old_version = self.version.clone();
2759
2760 let operations = self.text.redo_to_transaction(transaction_id);
2761 let redone = !operations.is_empty();
2762 for operation in operations {
2763 self.send_operation(Operation::Buffer(operation), true, cx);
2764 }
2765 if redone {
2766 self.did_edit(&old_version, was_dirty, cx)
2767 }
2768 redone
2769 }
2770
2771 /// Override current completion triggers with the user-provided completion triggers.
2772 pub fn set_completion_triggers(
2773 &mut self,
2774 server_id: LanguageServerId,
2775 triggers: BTreeSet<String>,
2776 cx: &mut Context<Self>,
2777 ) {
2778 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2779 if triggers.is_empty() {
2780 self.completion_triggers_per_language_server
2781 .remove(&server_id);
2782 self.completion_triggers = self
2783 .completion_triggers_per_language_server
2784 .values()
2785 .flat_map(|triggers| triggers.iter().cloned())
2786 .collect();
2787 } else {
2788 self.completion_triggers_per_language_server
2789 .insert(server_id, triggers.clone());
2790 self.completion_triggers.extend(triggers.iter().cloned());
2791 }
2792 self.send_operation(
2793 Operation::UpdateCompletionTriggers {
2794 triggers: triggers.into_iter().collect(),
2795 lamport_timestamp: self.completion_triggers_timestamp,
2796 server_id,
2797 },
2798 true,
2799 cx,
2800 );
2801 cx.notify();
2802 }
2803
2804 /// Returns a list of strings which trigger a completion menu for this language.
2805 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2806 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2807 &self.completion_triggers
2808 }
2809
2810 /// Call this directly after performing edits to prevent the preview tab
2811 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2812 /// to return false until there are additional edits.
2813 pub fn refresh_preview(&mut self) {
2814 self.preview_version = self.version.clone();
2815 }
2816
2817 /// Whether we should preserve the preview status of a tab containing this buffer.
2818 pub fn preserve_preview(&self) -> bool {
2819 !self.has_edits_since(&self.preview_version)
2820 }
2821}
2822
2823#[doc(hidden)]
2824#[cfg(any(test, feature = "test-support"))]
2825impl Buffer {
2826 pub fn edit_via_marked_text(
2827 &mut self,
2828 marked_string: &str,
2829 autoindent_mode: Option<AutoindentMode>,
2830 cx: &mut Context<Self>,
2831 ) {
2832 let edits = self.edits_for_marked_text(marked_string);
2833 self.edit(edits, autoindent_mode, cx);
2834 }
2835
2836 pub fn set_group_interval(&mut self, group_interval: Duration) {
2837 self.text.set_group_interval(group_interval);
2838 }
2839
2840 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2841 where
2842 T: rand::Rng,
2843 {
2844 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2845 let mut last_end = None;
2846 for _ in 0..old_range_count {
2847 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2848 break;
2849 }
2850
2851 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2852 let mut range = self.random_byte_range(new_start, rng);
2853 if rng.gen_bool(0.2) {
2854 mem::swap(&mut range.start, &mut range.end);
2855 }
2856 last_end = Some(range.end);
2857
2858 let new_text_len = rng.gen_range(0..10);
2859 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2860 new_text = new_text.to_uppercase();
2861
2862 edits.push((range, new_text));
2863 }
2864 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2865 self.edit(edits, None, cx);
2866 }
2867
2868 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2869 let was_dirty = self.is_dirty();
2870 let old_version = self.version.clone();
2871
2872 let ops = self.text.randomly_undo_redo(rng);
2873 if !ops.is_empty() {
2874 for op in ops {
2875 self.send_operation(Operation::Buffer(op), true, cx);
2876 self.did_edit(&old_version, was_dirty, cx);
2877 }
2878 }
2879 }
2880}
2881
2882impl EventEmitter<BufferEvent> for Buffer {}
2883
2884impl Deref for Buffer {
2885 type Target = TextBuffer;
2886
2887 fn deref(&self) -> &Self::Target {
2888 &self.text
2889 }
2890}
2891
2892impl BufferSnapshot {
2893 /// Returns [`IndentSize`] for a given line that respects user settings and
2894 /// language preferences.
2895 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2896 indent_size_for_line(self, row)
2897 }
2898
2899 /// Returns [`IndentSize`] for a given position that respects user settings
2900 /// and language preferences.
2901 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2902 let settings = language_settings(
2903 self.language_at(position).map(|l| l.name()),
2904 self.file(),
2905 cx,
2906 );
2907 if settings.hard_tabs {
2908 IndentSize::tab()
2909 } else {
2910 IndentSize::spaces(settings.tab_size.get())
2911 }
2912 }
2913
2914 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2915 /// is passed in as `single_indent_size`.
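///
/// For example (illustrative), `snapshot.suggested_indents(2..5, IndentSize::spaces(4))`
/// maps each of rows 2, 3, and 4 to its suggested [`IndentSize`], using four
/// spaces as the unit of indentation.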
2916 pub fn suggested_indents(
2917 &self,
2918 rows: impl Iterator<Item = u32>,
2919 single_indent_size: IndentSize,
2920 ) -> BTreeMap<u32, IndentSize> {
2921 let mut result = BTreeMap::new();
2922
2923 for row_range in contiguous_ranges(rows, 10) {
2924 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2925 Some(suggestions) => suggestions,
2926 _ => break,
2927 };
2928
2929 for (row, suggestion) in row_range.zip(suggestions) {
2930 let indent_size = if let Some(suggestion) = suggestion {
2931 result
2932 .get(&suggestion.basis_row)
2933 .copied()
2934 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2935 .with_delta(suggestion.delta, single_indent_size)
2936 } else {
2937 self.indent_size_for_line(row)
2938 };
2939
2940 result.insert(row, indent_size);
2941 }
2942 }
2943
2944 result
2945 }
2946
2947 fn suggest_autoindents(
2948 &self,
2949 row_range: Range<u32>,
2950 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2951 let config = &self.language.as_ref()?.config;
2952 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2953
2954 #[derive(Debug, Clone)]
2955 struct StartPosition {
2956 start: Point,
2957 suffix: SharedString,
2958 }
2959
2960 // Find the suggested indentation ranges based on the syntax tree.
2961 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2962 let end = Point::new(row_range.end, 0);
2963 let range = (start..end).to_offset(&self.text);
2964 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2965 Some(&grammar.indents_config.as_ref()?.query)
2966 });
2967 let indent_configs = matches
2968 .grammars()
2969 .iter()
2970 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2971 .collect::<Vec<_>>();
2972
2973 let mut indent_ranges = Vec::<Range<Point>>::new();
2974 let mut start_positions = Vec::<StartPosition>::new();
2975 let mut outdent_positions = Vec::<Point>::new();
2976 while let Some(mat) = matches.peek() {
2977 let mut start: Option<Point> = None;
2978 let mut end: Option<Point> = None;
2979
2980 let config = indent_configs[mat.grammar_index];
2981 for capture in mat.captures {
2982 if capture.index == config.indent_capture_ix {
2983 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2984 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2985 } else if Some(capture.index) == config.start_capture_ix {
2986 start = Some(Point::from_ts_point(capture.node.end_position()));
2987 } else if Some(capture.index) == config.end_capture_ix {
2988 end = Some(Point::from_ts_point(capture.node.start_position()));
2989 } else if Some(capture.index) == config.outdent_capture_ix {
2990 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2991 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2992 start_positions.push(StartPosition {
2993 start: Point::from_ts_point(capture.node.start_position()),
2994 suffix: suffix.clone(),
2995 });
2996 }
2997 }
2998
2999 matches.advance();
3000 if let Some((start, end)) = start.zip(end) {
3001 if start.row == end.row {
3002 continue;
3003 }
3004 let range = start..end;
3005 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3006 Err(ix) => indent_ranges.insert(ix, range),
3007 Ok(ix) => {
3008 let prev_range = &mut indent_ranges[ix];
3009 prev_range.end = prev_range.end.max(range.end);
3010 }
3011 }
3012 }
3013 }
3014
3015 let mut error_ranges = Vec::<Range<Point>>::new();
3016 let mut matches = self
3017 .syntax
3018 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3019 while let Some(mat) = matches.peek() {
3020 let node = mat.captures[0].node;
3021 let start = Point::from_ts_point(node.start_position());
3022 let end = Point::from_ts_point(node.end_position());
3023 let range = start..end;
3024 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3025 Ok(ix) | Err(ix) => ix,
3026 };
3027 let mut end_ix = ix;
3028 while let Some(existing_range) = error_ranges.get(end_ix) {
3029 if existing_range.end < end {
3030 end_ix += 1;
3031 } else {
3032 break;
3033 }
3034 }
3035 error_ranges.splice(ix..end_ix, [range]);
3036 matches.advance();
3037 }
3038
3039 outdent_positions.sort();
3040 for outdent_position in outdent_positions {
3041 // find the innermost indent range containing this outdent_position
3042 // set its end to the outdent position
3043 if let Some(range_to_truncate) = indent_ranges
3044 .iter_mut()
3045 .filter(|indent_range| indent_range.contains(&outdent_position))
3046 .next_back()
3047 {
3048 range_to_truncate.end = outdent_position;
3049 }
3050 }
3051
3052 start_positions.sort_by_key(|b| b.start);
3053
3054 // Find the suggested indentation increases and decreases based on regexes.
3055 let mut regex_outdent_map = HashMap::default();
3056 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3057 let mut start_positions_iter = start_positions.iter().peekable();
3058
3059 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3060 self.for_each_line(
3061 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3062 ..Point::new(row_range.end, 0),
3063 |row, line| {
3064 if config
3065 .decrease_indent_pattern
3066 .as_ref()
3067 .is_some_and(|regex| regex.is_match(line))
3068 {
3069 indent_change_rows.push((row, Ordering::Less));
3070 }
3071 if config
3072 .increase_indent_pattern
3073 .as_ref()
3074 .is_some_and(|regex| regex.is_match(line))
3075 {
3076 indent_change_rows.push((row + 1, Ordering::Greater));
3077 }
3078 while let Some(pos) = start_positions_iter.peek() {
3079 if pos.start.row < row {
3080 let pos = start_positions_iter.next().unwrap();
3081 last_seen_suffix
3082 .entry(pos.suffix.to_string())
3083 .or_default()
3084 .push(pos.start);
3085 } else {
3086 break;
3087 }
3088 }
3089 for rule in &config.decrease_indent_patterns {
3090 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3091 let row_start_column = self.indent_size_for_line(row).len;
3092 let basis_row = rule
3093 .valid_after
3094 .iter()
3095 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3096 .flatten()
3097 .filter(|start_point| start_point.column <= row_start_column)
3098 .max_by_key(|start_point| start_point.row);
3099 if let Some(outdent_to_row) = basis_row {
3100 regex_outdent_map.insert(row, outdent_to_row.row);
3101 }
3102 break;
3103 }
3104 }
3105 },
3106 );
3107
3108 let mut indent_changes = indent_change_rows.into_iter().peekable();
3109 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3110 prev_non_blank_row.unwrap_or(0)
3111 } else {
3112 row_range.start.saturating_sub(1)
3113 };
3114
3115 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3116 Some(row_range.map(move |row| {
3117 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3118
3119 let mut indent_from_prev_row = false;
3120 let mut outdent_from_prev_row = false;
3121 let mut outdent_to_row = u32::MAX;
3122 let mut from_regex = false;
3123
3124 while let Some((indent_row, delta)) = indent_changes.peek() {
3125 match indent_row.cmp(&row) {
3126 Ordering::Equal => match delta {
3127 Ordering::Less => {
3128 from_regex = true;
3129 outdent_from_prev_row = true
3130 }
3131 Ordering::Greater => {
3132 indent_from_prev_row = true;
3133 from_regex = true
3134 }
3135 _ => {}
3136 },
3137
3138 Ordering::Greater => break,
3139 Ordering::Less => {}
3140 }
3141
3142 indent_changes.next();
3143 }
3144
3145 for range in &indent_ranges {
3146 if range.start.row >= row {
3147 break;
3148 }
3149 if range.start.row == prev_row && range.end > row_start {
3150 indent_from_prev_row = true;
3151 }
3152 if range.end > prev_row_start && range.end <= row_start {
3153 outdent_to_row = outdent_to_row.min(range.start.row);
3154 }
3155 }
3156
3157 if let Some(basis_row) = regex_outdent_map.get(&row) {
3158 indent_from_prev_row = false;
3159 outdent_to_row = *basis_row;
3160 from_regex = true;
3161 }
3162
3163 let within_error = error_ranges
3164 .iter()
3165 .any(|e| e.start.row < row && e.end > row_start);
3166
3167 let suggestion = if outdent_to_row == prev_row
3168 || (outdent_from_prev_row && indent_from_prev_row)
3169 {
3170 Some(IndentSuggestion {
3171 basis_row: prev_row,
3172 delta: Ordering::Equal,
3173 within_error: within_error && !from_regex,
3174 })
3175 } else if indent_from_prev_row {
3176 Some(IndentSuggestion {
3177 basis_row: prev_row,
3178 delta: Ordering::Greater,
3179 within_error: within_error && !from_regex,
3180 })
3181 } else if outdent_to_row < prev_row {
3182 Some(IndentSuggestion {
3183 basis_row: outdent_to_row,
3184 delta: Ordering::Equal,
3185 within_error: within_error && !from_regex,
3186 })
3187 } else if outdent_from_prev_row {
3188 Some(IndentSuggestion {
3189 basis_row: prev_row,
3190 delta: Ordering::Less,
3191 within_error: within_error && !from_regex,
3192 })
3193 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3194 {
3195 Some(IndentSuggestion {
3196 basis_row: prev_row,
3197 delta: Ordering::Equal,
3198 within_error: within_error && !from_regex,
3199 })
3200 } else {
3201 None
3202 };
3203
3204 prev_row = row;
3205 prev_row_start = row_start;
3206 suggestion
3207 }))
3208 }
3209
3210 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3211 while row > 0 {
3212 row -= 1;
3213 if !self.is_line_blank(row) {
3214 return Some(row);
3215 }
3216 }
3217 None
3218 }
3219
3220 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3221 let captures = self.syntax.captures(range, &self.text, |grammar| {
3222 grammar.highlights_query.as_ref()
3223 });
3224 let highlight_maps = captures
3225 .grammars()
3226 .iter()
3227 .map(|grammar| grammar.highlight_map())
3228 .collect();
3229 (captures, highlight_maps)
3230 }
3231
3232 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3233 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3234 /// returned in chunks where each chunk has a single syntax highlighting style and
3235 /// diagnostic status.
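///
/// A hedged sketch (it assumes each yielded chunk exposes its text as a `text`
/// field) that reassembles the raw text of a snapshot from its chunks:
///
/// ```ignore
/// let text: String = snapshot
///     .chunks(0..snapshot.len(), false)
///     .map(|chunk| chunk.text)
///     .collect();
/// ```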
3236 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3237 let range = range.start.to_offset(self)..range.end.to_offset(self);
3238
3239 let mut syntax = None;
3240 if language_aware {
3241 syntax = Some(self.get_highlights(range.clone()));
3242 }
3243 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3244 let diagnostics = language_aware;
3245 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3246 }
3247
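/// Returns the text in the given range together with its syntax highlighting,
/// resolved against the provided [`SyntaxTheme`] and optional override style.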
3248 pub fn highlighted_text_for_range<T: ToOffset>(
3249 &self,
3250 range: Range<T>,
3251 override_style: Option<HighlightStyle>,
3252 syntax_theme: &SyntaxTheme,
3253 ) -> HighlightedText {
3254 HighlightedText::from_buffer_range(
3255 range,
3256 &self.text,
3257 &self.syntax,
3258 override_style,
3259 syntax_theme,
3260 )
3261 }
3262
3263 /// Invokes the given callback for each line of text in the given range of the buffer.
3264 /// Uses a callback to avoid allocating a string for each line.
3265 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3266 let mut line = String::new();
3267 let mut row = range.start.row;
3268 for chunk in self
3269 .as_rope()
3270 .chunks_in_range(range.to_offset(self))
3271 .chain(["\n"])
3272 {
3273 for (newline_ix, text) in chunk.split('\n').enumerate() {
3274 if newline_ix > 0 {
3275 callback(row, &line);
3276 row += 1;
3277 line.clear();
3278 }
3279 line.push_str(text);
3280 }
3281 }
3282 }
3283
3284 /// Iterates over every [`SyntaxLayer`] in the buffer.
3285 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3286 self.syntax
3287 .layers_for_range(0..self.len(), &self.text, true)
3288 }
3289
3290 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3291 let offset = position.to_offset(self);
3292 self.syntax
3293 .layers_for_range(offset..offset, &self.text, false)
3294 .filter(|l| l.node().end_byte() > offset)
3295 .last()
3296 }
3297
3298 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3299 &self,
3300 range: Range<D>,
3301 ) -> Option<SyntaxLayer<'_>> {
3302 let range = range.to_offset(self);
3303 self.syntax
3304 .layers_for_range(range, &self.text, false)
3305 .max_by(|a, b| {
3306 if a.depth != b.depth {
3307 a.depth.cmp(&b.depth)
3308 } else if a.offset.0 != b.offset.0 {
3309 a.offset.0.cmp(&b.offset.0)
3310 } else {
3311 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3312 }
3313 })
3314 }
3315
3316 /// Returns the main [`Language`].
3317 pub fn language(&self) -> Option<&Arc<Language>> {
3318 self.language.as_ref()
3319 }
3320
3321 /// Returns the [`Language`] at the given location.
3322 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3323 self.syntax_layer_at(position)
3324 .map(|info| info.language)
3325 .or(self.language.as_ref())
3326 }
3327
3328 /// Returns the settings for the language at the given location.
3329 pub fn settings_at<'a, D: ToOffset>(
3330 &'a self,
3331 position: D,
3332 cx: &'a App,
3333 ) -> Cow<'a, LanguageSettings> {
3334 language_settings(
3335 self.language_at(position).map(|l| l.name()),
3336 self.file.as_ref(),
3337 cx,
3338 )
3339 }
3340
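/// Returns a [`CharClassifier`] configured for the language scope at the given position.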
3341 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3342 CharClassifier::new(self.language_scope_at(point))
3343 }
3344
3345 /// Returns the [`LanguageScope`] at the given location.
3346 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3347 let offset = position.to_offset(self);
3348 let mut scope = None;
3349 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3350
3351 // Use the layer that has the smallest node intersecting the given point.
3352 for layer in self
3353 .syntax
3354 .layers_for_range(offset..offset, &self.text, false)
3355 {
3356 let mut cursor = layer.node().walk();
3357
3358 let mut range = None;
3359 loop {
3360 let child_range = cursor.node().byte_range();
3361 if !child_range.contains(&offset) {
3362 break;
3363 }
3364
3365 range = Some(child_range);
3366 if cursor.goto_first_child_for_byte(offset).is_none() {
3367 break;
3368 }
3369 }
3370
3371 if let Some(range) = range
3372 && smallest_range_and_depth.as_ref().is_none_or(
3373 |(smallest_range, smallest_range_depth)| {
3374 if layer.depth > *smallest_range_depth {
3375 true
3376 } else if layer.depth == *smallest_range_depth {
3377 range.len() < smallest_range.len()
3378 } else {
3379 false
3380 }
3381 },
3382 )
3383 {
3384 smallest_range_and_depth = Some((range, layer.depth));
3385 scope = Some(LanguageScope {
3386 language: layer.language.clone(),
3387 override_id: layer.override_id(offset, &self.text),
3388 });
3389 }
3390 }
3391
3392 scope.or_else(|| {
3393 self.language.clone().map(|language| LanguageScope {
3394 language,
3395 override_id: None,
3396 })
3397 })
3398 }
3399
3400 /// Returns a tuple of the range and character kind of the word
3401 /// surrounding the given position.
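///
/// For example (illustrative), with the text `"let foo = 1"` and a position
/// inside `foo`, this returns the byte range covering `foo` along with the
/// word character kind.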
3402 pub fn surrounding_word<T: ToOffset>(
3403 &self,
3404 start: T,
3405 for_completion: bool,
3406 ) -> (Range<usize>, Option<CharKind>) {
3407 let mut start = start.to_offset(self);
3408 let mut end = start;
3409 let mut next_chars = self.chars_at(start).take(128).peekable();
3410 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3411
3412 let classifier = self
3413 .char_classifier_at(start)
3414 .for_completion(for_completion);
3415 let word_kind = cmp::max(
3416 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3417 next_chars.peek().copied().map(|c| classifier.kind(c)),
3418 );
3419
3420 for ch in prev_chars {
3421 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3422 start -= ch.len_utf8();
3423 } else {
3424 break;
3425 }
3426 }
3427
3428 for ch in next_chars {
3429 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3430 end += ch.len_utf8();
3431 } else {
3432 break;
3433 }
3434 }
3435
3436 (start..end, word_kind)
3437 }
3438
3439 /// Returns the closest syntax node enclosing the given range.
3440 pub fn syntax_ancestor<'a, T: ToOffset>(
3441 &'a self,
3442 range: Range<T>,
3443 ) -> Option<tree_sitter::Node<'a>> {
3444 let range = range.start.to_offset(self)..range.end.to_offset(self);
3445 let mut result: Option<tree_sitter::Node<'a>> = None;
3446 'outer: for layer in self
3447 .syntax
3448 .layers_for_range(range.clone(), &self.text, true)
3449 {
3450 let mut cursor = layer.node().walk();
3451
3452 // Descend to the first leaf that touches the start of the range.
3453 //
3454 // If the range is non-empty and the current node ends exactly at the start,
3455 // move to the next sibling to find a node that extends beyond the start.
3456 //
3457 // If the range is empty and the current node starts after the range position,
3458 // move to the previous sibling to find the node that contains the position.
3459 while cursor.goto_first_child_for_byte(range.start).is_some() {
3460 if !range.is_empty() && cursor.node().end_byte() == range.start {
3461 cursor.goto_next_sibling();
3462 }
3463 if range.is_empty() && cursor.node().start_byte() > range.start {
3464 cursor.goto_previous_sibling();
3465 }
3466 }
3467
3468 // Ascend to the smallest ancestor that strictly contains the range.
3469 loop {
3470 let node_range = cursor.node().byte_range();
3471 if node_range.start <= range.start
3472 && node_range.end >= range.end
3473 && node_range.len() > range.len()
3474 {
3475 break;
3476 }
3477 if !cursor.goto_parent() {
3478 continue 'outer;
3479 }
3480 }
3481
3482 let left_node = cursor.node();
3483 let mut layer_result = left_node;
3484
3485 // For an empty range, try to find another node immediately to the right of the range.
3486 if left_node.end_byte() == range.start {
3487 let mut right_node = None;
3488 while !cursor.goto_next_sibling() {
3489 if !cursor.goto_parent() {
3490 break;
3491 }
3492 }
3493
3494 while cursor.node().start_byte() == range.start {
3495 right_node = Some(cursor.node());
3496 if !cursor.goto_first_child() {
3497 break;
3498 }
3499 }
3500
3501 // If there is a candidate node on both sides of the (empty) range, then
3502 // decide between the two by favoring a named node over an anonymous token.
3503 // If both nodes are the same in that regard, favor the right one.
3504 if let Some(right_node) = right_node
3505 && (right_node.is_named() || !left_node.is_named())
3506 {
3507 layer_result = right_node;
3508 }
3509 }
3510
3511 if let Some(previous_result) = &result
3512 && previous_result.byte_range().len() < layer_result.byte_range().len()
3513 {
3514 continue;
3515 }
3516 result = Some(layer_result);
3517 }
3518
3519 result
3520 }
3521
    /// Returns the root syntax node within the given row.
3523 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3524 let start_offset = position.to_offset(self);
3525
3526 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3527
3528 let layer = self
3529 .syntax
3530 .layers_for_range(start_offset..start_offset, &self.text, true)
3531 .next()?;
3532
3533 let mut cursor = layer.node().walk();
3534
3535 // Descend to the first leaf that touches the start of the range.
3536 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3537 if cursor.node().end_byte() == start_offset {
3538 cursor.goto_next_sibling();
3539 }
3540 }
3541
3542 // Ascend to the root node within the same row.
3543 while cursor.goto_parent() {
3544 if cursor.node().start_position().row != row {
3545 break;
3546 }
3547 }
3548
3549 Some(cursor.node())
3550 }
3551
3552 /// Returns the outline for the buffer.
3553 ///
3554 /// This method allows passing an optional [`SyntaxTheme`] to
3555 /// syntax-highlight the returned symbols.
3556 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3557 self.outline_items_containing(0..self.len(), true, theme)
3558 .map(Outline::new)
3559 }
3560
3561 /// Returns all the symbols that contain the given position.
3562 ///
3563 /// This method allows passing an optional [`SyntaxTheme`] to
3564 /// syntax-highlight the returned symbols.
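    ///
    /// A rough usage sketch (not a doctest; `snapshot` and `cursor_offset` are assumed):
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(cursor_offset, None).unwrap_or_default() {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```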
3565 pub fn symbols_containing<T: ToOffset>(
3566 &self,
3567 position: T,
3568 theme: Option<&SyntaxTheme>,
3569 ) -> Option<Vec<OutlineItem<Anchor>>> {
3570 let position = position.to_offset(self);
3571 let mut items = self.outline_items_containing(
3572 position.saturating_sub(1)..self.len().min(position + 1),
3573 false,
3574 theme,
3575 )?;
3576 let mut prev_depth = None;
3577 items.retain(|item| {
3578 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3579 prev_depth = Some(item.depth);
3580 result
3581 });
3582 Some(items)
3583 }
3584
3585 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3586 let range = range.to_offset(self);
3587 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3588 grammar.outline_config.as_ref().map(|c| &c.query)
3589 });
3590 let configs = matches
3591 .grammars()
3592 .iter()
3593 .map(|g| g.outline_config.as_ref().unwrap())
3594 .collect::<Vec<_>>();
3595
3596 while let Some(mat) = matches.peek() {
3597 let config = &configs[mat.grammar_index];
3598 let containing_item_node = maybe!({
3599 let item_node = mat.captures.iter().find_map(|cap| {
3600 if cap.index == config.item_capture_ix {
3601 Some(cap.node)
3602 } else {
3603 None
3604 }
3605 })?;
3606
3607 let item_byte_range = item_node.byte_range();
3608 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3609 None
3610 } else {
3611 Some(item_node)
3612 }
3613 });
3614
3615 if let Some(item_node) = containing_item_node {
3616 return Some(
3617 Point::from_ts_point(item_node.start_position())
3618 ..Point::from_ts_point(item_node.end_position()),
3619 );
3620 }
3621
3622 matches.advance();
3623 }
3624 None
3625 }
3626
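    /// Returns the outline items whose ranges intersect the given range, assigning
    /// depths from containment relationships and converting the ranges to anchors.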
3627 pub fn outline_items_containing<T: ToOffset>(
3628 &self,
3629 range: Range<T>,
3630 include_extra_context: bool,
3631 theme: Option<&SyntaxTheme>,
3632 ) -> Option<Vec<OutlineItem<Anchor>>> {
3633 let range = range.to_offset(self);
3634 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3635 grammar.outline_config.as_ref().map(|c| &c.query)
3636 });
3637 let configs = matches
3638 .grammars()
3639 .iter()
3640 .map(|g| g.outline_config.as_ref().unwrap())
3641 .collect::<Vec<_>>();
3642
3643 let mut items = Vec::new();
3644 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3645 while let Some(mat) = matches.peek() {
3646 let config = &configs[mat.grammar_index];
3647 if let Some(item) =
3648 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3649 {
3650 items.push(item);
3651 } else if let Some(capture) = mat
3652 .captures
3653 .iter()
3654 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3655 {
3656 let capture_range = capture.node.start_position()..capture.node.end_position();
3657 let mut capture_row_range =
3658 capture_range.start.row as u32..capture_range.end.row as u32;
3659 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3660 {
3661 capture_row_range.end -= 1;
3662 }
3663 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3664 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3665 last_row_range.end = capture_row_range.end;
3666 } else {
3667 annotation_row_ranges.push(capture_row_range);
3668 }
3669 } else {
3670 annotation_row_ranges.push(capture_row_range);
3671 }
3672 }
3673 matches.advance();
3674 }
3675
3676 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3677
3678 // Assign depths based on containment relationships and convert to anchors.
3679 let mut item_ends_stack = Vec::<Point>::new();
3680 let mut anchor_items = Vec::new();
3681 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3682 for item in items {
3683 while let Some(last_end) = item_ends_stack.last().copied() {
3684 if last_end < item.range.end {
3685 item_ends_stack.pop();
3686 } else {
3687 break;
3688 }
3689 }
3690
3691 let mut annotation_row_range = None;
3692 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3693 let row_preceding_item = item.range.start.row.saturating_sub(1);
3694 if next_annotation_row_range.end < row_preceding_item {
3695 annotation_row_ranges.next();
3696 } else {
3697 if next_annotation_row_range.end == row_preceding_item {
3698 annotation_row_range = Some(next_annotation_row_range.clone());
3699 annotation_row_ranges.next();
3700 }
3701 break;
3702 }
3703 }
3704
3705 anchor_items.push(OutlineItem {
3706 depth: item_ends_stack.len(),
3707 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3708 text: item.text,
3709 highlight_ranges: item.highlight_ranges,
3710 name_ranges: item.name_ranges,
3711 body_range: item.body_range.map(|body_range| {
3712 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3713 }),
3714 annotation_range: annotation_row_range.map(|annotation_range| {
3715 self.anchor_after(Point::new(annotation_range.start, 0))
3716 ..self.anchor_before(Point::new(
3717 annotation_range.end,
3718 self.line_len(annotation_range.end),
3719 ))
3720 }),
3721 });
3722 item_ends_stack.push(item.range.end);
3723 }
3724
3725 Some(anchor_items)
3726 }
3727
3728 fn next_outline_item(
3729 &self,
3730 config: &OutlineConfig,
3731 mat: &SyntaxMapMatch,
3732 range: &Range<usize>,
3733 include_extra_context: bool,
3734 theme: Option<&SyntaxTheme>,
3735 ) -> Option<OutlineItem<Point>> {
3736 let item_node = mat.captures.iter().find_map(|cap| {
3737 if cap.index == config.item_capture_ix {
3738 Some(cap.node)
3739 } else {
3740 None
3741 }
3742 })?;
3743
3744 let item_byte_range = item_node.byte_range();
3745 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3746 return None;
3747 }
3748 let item_point_range = Point::from_ts_point(item_node.start_position())
3749 ..Point::from_ts_point(item_node.end_position());
3750
3751 let mut open_point = None;
3752 let mut close_point = None;
3753 let mut buffer_ranges = Vec::new();
3754 for capture in mat.captures {
3755 let node_is_name;
3756 if capture.index == config.name_capture_ix {
3757 node_is_name = true;
3758 } else if Some(capture.index) == config.context_capture_ix
3759 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3760 {
3761 node_is_name = false;
3762 } else {
3763 if Some(capture.index) == config.open_capture_ix {
3764 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3765 } else if Some(capture.index) == config.close_capture_ix {
3766 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3767 }
3768
3769 continue;
3770 }
3771
3772 let mut range = capture.node.start_byte()..capture.node.end_byte();
3773 let start = capture.node.start_position();
3774 if capture.node.end_position().row > start.row {
3775 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3776 }
3777
3778 if !range.is_empty() {
3779 buffer_ranges.push((range, node_is_name));
3780 }
3781 }
3782 if buffer_ranges.is_empty() {
3783 return None;
3784 }
3785 let mut text = String::new();
3786 let mut highlight_ranges = Vec::new();
3787 let mut name_ranges = Vec::new();
3788 let mut chunks = self.chunks(
3789 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3790 true,
3791 );
3792 let mut last_buffer_range_end = 0;
3793
3794 for (buffer_range, is_name) in buffer_ranges {
3795 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3796 if space_added {
3797 text.push(' ');
3798 }
3799 let before_append_len = text.len();
3800 let mut offset = buffer_range.start;
3801 chunks.seek(buffer_range.clone());
3802 for mut chunk in chunks.by_ref() {
3803 if chunk.text.len() > buffer_range.end - offset {
3804 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3805 offset = buffer_range.end;
3806 } else {
3807 offset += chunk.text.len();
3808 }
3809 let style = chunk
3810 .syntax_highlight_id
3811 .zip(theme)
3812 .and_then(|(highlight, theme)| highlight.style(theme));
3813 if let Some(style) = style {
3814 let start = text.len();
3815 let end = start + chunk.text.len();
3816 highlight_ranges.push((start..end, style));
3817 }
3818 text.push_str(chunk.text);
3819 if offset >= buffer_range.end {
3820 break;
3821 }
3822 }
3823 if is_name {
3824 let after_append_len = text.len();
3825 let start = if space_added && !name_ranges.is_empty() {
3826 before_append_len - 1
3827 } else {
3828 before_append_len
3829 };
3830 name_ranges.push(start..after_append_len);
3831 }
3832 last_buffer_range_end = buffer_range.end;
3833 }
3834
3835 Some(OutlineItem {
3836 depth: 0, // We'll calculate the depth later
3837 range: item_point_range,
3838 text,
3839 highlight_ranges,
3840 name_ranges,
3841 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3842 annotation_range: None,
3843 })
3844 }
3845
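    /// Returns the byte ranges of function bodies intersecting `within`, as candidates for folding.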
3846 pub fn function_body_fold_ranges<T: ToOffset>(
3847 &self,
3848 within: Range<T>,
3849 ) -> impl Iterator<Item = Range<usize>> + '_ {
3850 self.text_object_ranges(within, TreeSitterOptions::default())
3851 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3852 }
3853
    /// For each grammar in the buffer, runs the [`tree_sitter::Query`] selected by
    /// the provided callback against the given range.
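    ///
    /// A rough usage sketch (not a doctest; mirrors how the outline query is consumed
    /// elsewhere in this file):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // inspect `mat.captures` for the current grammar here
    ///     matches.advance();
    /// }
    /// ```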
3856 pub fn matches(
3857 &self,
3858 range: Range<usize>,
3859 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3860 ) -> SyntaxMapMatches<'_> {
3861 self.syntax.matches(range, self, query)
3862 }
3863
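    /// Returns every bracket pair matched by the brackets query whose overall range
    /// overlaps the given range, including newline-only pairs.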
3864 pub fn all_bracket_ranges(
3865 &self,
3866 range: Range<usize>,
3867 ) -> impl Iterator<Item = BracketMatch> + '_ {
3868 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3869 grammar.brackets_config.as_ref().map(|c| &c.query)
3870 });
3871 let configs = matches
3872 .grammars()
3873 .iter()
3874 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3875 .collect::<Vec<_>>();
3876
3877 iter::from_fn(move || {
3878 while let Some(mat) = matches.peek() {
3879 let mut open = None;
3880 let mut close = None;
3881 let config = &configs[mat.grammar_index];
3882 let pattern = &config.patterns[mat.pattern_index];
3883 for capture in mat.captures {
3884 if capture.index == config.open_capture_ix {
3885 open = Some(capture.node.byte_range());
3886 } else if capture.index == config.close_capture_ix {
3887 close = Some(capture.node.byte_range());
3888 }
3889 }
3890
3891 matches.advance();
3892
3893 let Some((open_range, close_range)) = open.zip(close) else {
3894 continue;
3895 };
3896
3897 let bracket_range = open_range.start..=close_range.end;
3898 if !bracket_range.overlaps(&range) {
3899 continue;
3900 }
3901
3902 return Some(BracketMatch {
3903 open_range,
3904 close_range,
3905 newline_only: pattern.newline_only,
3906 });
3907 }
3908 None
3909 })
3910 }
3911
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3913 pub fn bracket_ranges<T: ToOffset>(
3914 &self,
3915 range: Range<T>,
3916 ) -> impl Iterator<Item = BracketMatch> + '_ {
3917 // Find bracket pairs that *inclusively* contain the given range.
3918 let range = range.start.to_offset(self).saturating_sub(1)
3919 ..self.len().min(range.end.to_offset(self) + 1);
3920 self.all_bracket_ranges(range)
3921 .filter(|pair| !pair.newline_only)
3922 }
3923
3924 pub fn debug_variables_query<T: ToOffset>(
3925 &self,
3926 range: Range<T>,
3927 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3928 let range = range.start.to_offset(self).saturating_sub(1)
3929 ..self.len().min(range.end.to_offset(self) + 1);
3930
3931 let mut matches = self.syntax.matches_with_options(
3932 range.clone(),
3933 &self.text,
3934 TreeSitterOptions::default(),
3935 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3936 );
3937
3938 let configs = matches
3939 .grammars()
3940 .iter()
3941 .map(|grammar| grammar.debug_variables_config.as_ref())
3942 .collect::<Vec<_>>();
3943
3944 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3945
3946 iter::from_fn(move || {
3947 loop {
3948 while let Some(capture) = captures.pop() {
3949 if capture.0.overlaps(&range) {
3950 return Some(capture);
3951 }
3952 }
3953
3954 let mat = matches.peek()?;
3955
3956 let Some(config) = configs[mat.grammar_index].as_ref() else {
3957 matches.advance();
3958 continue;
3959 };
3960
3961 for capture in mat.captures {
3962 let Some(ix) = config
3963 .objects_by_capture_ix
3964 .binary_search_by_key(&capture.index, |e| e.0)
3965 .ok()
3966 else {
3967 continue;
3968 };
3969 let text_object = config.objects_by_capture_ix[ix].1;
3970 let byte_range = capture.node.byte_range();
3971
3972 let mut found = false;
3973 for (range, existing) in captures.iter_mut() {
3974 if existing == &text_object {
3975 range.start = range.start.min(byte_range.start);
3976 range.end = range.end.max(byte_range.end);
3977 found = true;
3978 break;
3979 }
3980 }
3981
3982 if !found {
3983 captures.push((byte_range, text_object));
3984 }
3985 }
3986
3987 matches.advance();
3988 }
3989 })
3990 }
3991
3992 pub fn text_object_ranges<T: ToOffset>(
3993 &self,
3994 range: Range<T>,
3995 options: TreeSitterOptions,
3996 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3997 let range = range.start.to_offset(self).saturating_sub(1)
3998 ..self.len().min(range.end.to_offset(self) + 1);
3999
4000 let mut matches =
4001 self.syntax
4002 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4003 grammar.text_object_config.as_ref().map(|c| &c.query)
4004 });
4005
4006 let configs = matches
4007 .grammars()
4008 .iter()
4009 .map(|grammar| grammar.text_object_config.as_ref())
4010 .collect::<Vec<_>>();
4011
4012 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4013
4014 iter::from_fn(move || {
4015 loop {
4016 while let Some(capture) = captures.pop() {
4017 if capture.0.overlaps(&range) {
4018 return Some(capture);
4019 }
4020 }
4021
4022 let mat = matches.peek()?;
4023
4024 let Some(config) = configs[mat.grammar_index].as_ref() else {
4025 matches.advance();
4026 continue;
4027 };
4028
4029 for capture in mat.captures {
4030 let Some(ix) = config
4031 .text_objects_by_capture_ix
4032 .binary_search_by_key(&capture.index, |e| e.0)
4033 .ok()
4034 else {
4035 continue;
4036 };
4037 let text_object = config.text_objects_by_capture_ix[ix].1;
4038 let byte_range = capture.node.byte_range();
4039
4040 let mut found = false;
4041 for (range, existing) in captures.iter_mut() {
4042 if existing == &text_object {
4043 range.start = range.start.min(byte_range.start);
4044 range.end = range.end.max(byte_range.end);
4045 found = true;
4046 break;
4047 }
4048 }
4049
4050 if !found {
4051 captures.push((byte_range, text_object));
4052 }
4053 }
4054
4055 matches.advance();
4056 }
4057 })
4058 }
4059
    /// Returns enclosing bracket ranges containing the given range.
4061 pub fn enclosing_bracket_ranges<T: ToOffset>(
4062 &self,
4063 range: Range<T>,
4064 ) -> impl Iterator<Item = BracketMatch> + '_ {
4065 let range = range.start.to_offset(self)..range.end.to_offset(self);
4066
4067 self.bracket_ranges(range.clone()).filter(move |pair| {
4068 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4069 })
4070 }
4071
    /// Returns the smallest pair of enclosing bracket ranges containing the given range, or `None` if no pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
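    ///
    /// A rough usage sketch (not a doctest; `snapshot` and `selection_range` are assumed):
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(selection_range, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding pair
    /// }
    /// ```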
4075 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4076 &self,
4077 range: Range<T>,
4078 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4079 ) -> Option<(Range<usize>, Range<usize>)> {
4080 let range = range.start.to_offset(self)..range.end.to_offset(self);
4081
4082 // Get the ranges of the innermost pair of brackets.
4083 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4084
4085 for pair in self.enclosing_bracket_ranges(range) {
4086 if let Some(range_filter) = range_filter
4087 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4088 {
4089 continue;
4090 }
4091
4092 let len = pair.close_range.end - pair.open_range.start;
4093
4094 if let Some((existing_open, existing_close)) = &result {
4095 let existing_len = existing_close.end - existing_open.start;
4096 if len > existing_len {
4097 continue;
4098 }
4099 }
4100
4101 result = Some((pair.open_range, pair.close_range));
4102 }
4103
4104 result
4105 }
4106
    /// Returns the offset ranges of any matches of the redaction query.
4108 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4109 /// will be run on the relevant section of the buffer.
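    ///
    /// A rough usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// let redacted: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```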
4110 pub fn redacted_ranges<T: ToOffset>(
4111 &self,
4112 range: Range<T>,
4113 ) -> impl Iterator<Item = Range<usize>> + '_ {
4114 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4115 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4116 grammar
4117 .redactions_config
4118 .as_ref()
4119 .map(|config| &config.query)
4120 });
4121
4122 let configs = syntax_matches
4123 .grammars()
4124 .iter()
4125 .map(|grammar| grammar.redactions_config.as_ref())
4126 .collect::<Vec<_>>();
4127
4128 iter::from_fn(move || {
4129 let redacted_range = syntax_matches
4130 .peek()
4131 .and_then(|mat| {
4132 configs[mat.grammar_index].and_then(|config| {
4133 mat.captures
4134 .iter()
4135 .find(|capture| capture.index == config.redaction_capture_ix)
4136 })
4137 })
4138 .map(|mat| mat.node.byte_range());
4139 syntax_matches.advance();
4140 redacted_range
4141 })
4142 }
4143
4144 pub fn injections_intersecting_range<T: ToOffset>(
4145 &self,
4146 range: Range<T>,
4147 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4148 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4149
4150 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4151 grammar
4152 .injection_config
4153 .as_ref()
4154 .map(|config| &config.query)
4155 });
4156
4157 let configs = syntax_matches
4158 .grammars()
4159 .iter()
4160 .map(|grammar| grammar.injection_config.as_ref())
4161 .collect::<Vec<_>>();
4162
4163 iter::from_fn(move || {
4164 let ranges = syntax_matches.peek().and_then(|mat| {
4165 let config = &configs[mat.grammar_index]?;
4166 let content_capture_range = mat.captures.iter().find_map(|capture| {
4167 if capture.index == config.content_capture_ix {
4168 Some(capture.node.byte_range())
4169 } else {
4170 None
4171 }
4172 })?;
4173 let language = self.language_at(content_capture_range.start)?;
4174 Some((content_capture_range, language))
4175 });
4176 syntax_matches.advance();
4177 ranges
4178 })
4179 }
4180
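    /// Returns the runnable ranges (for example, tests) captured by each grammar's
    /// runnables query within the given offset range.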
4181 pub fn runnable_ranges(
4182 &self,
4183 offset_range: Range<usize>,
4184 ) -> impl Iterator<Item = RunnableRange> + '_ {
4185 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4186 grammar.runnable_config.as_ref().map(|config| &config.query)
4187 });
4188
4189 let test_configs = syntax_matches
4190 .grammars()
4191 .iter()
4192 .map(|grammar| grammar.runnable_config.as_ref())
4193 .collect::<Vec<_>>();
4194
4195 iter::from_fn(move || {
4196 loop {
4197 let mat = syntax_matches.peek()?;
4198
4199 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4200 let mut run_range = None;
4201 let full_range = mat.captures.iter().fold(
4202 Range {
4203 start: usize::MAX,
4204 end: 0,
4205 },
4206 |mut acc, next| {
4207 let byte_range = next.node.byte_range();
4208 if acc.start > byte_range.start {
4209 acc.start = byte_range.start;
4210 }
4211 if acc.end < byte_range.end {
4212 acc.end = byte_range.end;
4213 }
4214 acc
4215 },
4216 );
4217 if full_range.start > full_range.end {
4218 // We did not find a full spanning range of this match.
4219 return None;
4220 }
4221 let extra_captures: SmallVec<[_; 1]> =
4222 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4223 test_configs
4224 .extra_captures
4225 .get(capture.index as usize)
4226 .cloned()
4227 .and_then(|tag_name| match tag_name {
4228 RunnableCapture::Named(name) => {
4229 Some((capture.node.byte_range(), name))
4230 }
4231 RunnableCapture::Run => {
4232 let _ = run_range.insert(capture.node.byte_range());
4233 None
4234 }
4235 })
4236 }));
4237 let run_range = run_range?;
4238 let tags = test_configs
4239 .query
4240 .property_settings(mat.pattern_index)
4241 .iter()
4242 .filter_map(|property| {
4243 if *property.key == *"tag" {
4244 property
4245 .value
4246 .as_ref()
4247 .map(|value| RunnableTag(value.to_string().into()))
4248 } else {
4249 None
4250 }
4251 })
4252 .collect();
4253 let extra_captures = extra_captures
4254 .into_iter()
4255 .map(|(range, name)| {
4256 (
4257 name.to_string(),
4258 self.text_for_range(range).collect::<String>(),
4259 )
4260 })
4261 .collect();
4262 // All tags should have the same range.
4263 Some(RunnableRange {
4264 run_range,
4265 full_range,
4266 runnable: Runnable {
4267 tags,
4268 language: mat.language,
4269 buffer: self.remote_id(),
4270 },
4271 extra_captures,
4272 buffer_id: self.remote_id(),
4273 })
4274 });
4275
4276 syntax_matches.advance();
4277 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? returns None. But a match whose captures lacked
                    // a run marker shouldn't end this iterator, so in that case we loop around to the next match.
4280 return test_range;
4281 }
4282 }
4283 })
4284 }
4285
    /// Returns the selections of remote peers (and, optionally, the local replica) intersecting the given range.
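    ///
    /// A rough usage sketch (not a doctest; assumes the usual whole-buffer anchors
    /// `Anchor::MIN` and `Anchor::MAX`):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     // render `selections` for `replica_id`
    /// }
    /// ```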
4287 #[allow(clippy::type_complexity)]
4288 pub fn selections_in_range(
4289 &self,
4290 range: Range<Anchor>,
4291 include_local: bool,
4292 ) -> impl Iterator<
4293 Item = (
4294 ReplicaId,
4295 bool,
4296 CursorShape,
4297 impl Iterator<Item = &Selection<Anchor>> + '_,
4298 ),
4299 > + '_ {
4300 self.remote_selections
4301 .iter()
4302 .filter(move |(replica_id, set)| {
4303 (include_local || **replica_id != self.text.replica_id())
4304 && !set.selections.is_empty()
4305 })
4306 .map(move |(replica_id, set)| {
4307 let start_ix = match set.selections.binary_search_by(|probe| {
4308 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4309 }) {
4310 Ok(ix) | Err(ix) => ix,
4311 };
4312 let end_ix = match set.selections.binary_search_by(|probe| {
4313 probe.start.cmp(&range.end, self).then(Ordering::Less)
4314 }) {
4315 Ok(ix) | Err(ix) => ix,
4316 };
4317
4318 (
4319 *replica_id,
4320 set.line_mode,
4321 set.cursor_shape,
4322 set.selections[start_ix..end_ix].iter(),
4323 )
4324 })
4325 }
4326
    /// Returns whether the buffer contains any diagnostics.
4328 pub fn has_diagnostics(&self) -> bool {
4329 !self.diagnostics.is_empty()
4330 }
4331
4332 /// Returns all the diagnostics intersecting the given range.
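    ///
    /// A rough usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     eprintln!("{:?}: {}", entry.diagnostic.severity, entry.diagnostic.message);
    /// }
    /// ```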
4333 pub fn diagnostics_in_range<'a, T, O>(
4334 &'a self,
4335 search_range: Range<T>,
4336 reversed: bool,
4337 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4338 where
4339 T: 'a + Clone + ToOffset,
4340 O: 'a + FromAnchor,
4341 {
4342 let mut iterators: Vec<_> = self
4343 .diagnostics
4344 .iter()
4345 .map(|(_, collection)| {
4346 collection
4347 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4348 .peekable()
4349 })
4350 .collect();
4351
4352 std::iter::from_fn(move || {
4353 let (next_ix, _) = iterators
4354 .iter_mut()
4355 .enumerate()
4356 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4357 .min_by(|(_, a), (_, b)| {
4358 let cmp = a
4359 .range
4360 .start
4361 .cmp(&b.range.start, self)
4362 // when range is equal, sort by diagnostic severity
4363 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4364 // and stabilize order with group_id
4365 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4366 if reversed { cmp.reverse() } else { cmp }
4367 })?;
4368 iterators[next_ix]
4369 .next()
4370 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4371 diagnostic,
4372 range: FromAnchor::from_anchor(&range.start, self)
4373 ..FromAnchor::from_anchor(&range.end, self),
4374 })
4375 })
4376 }
4377
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4381 pub fn diagnostic_groups(
4382 &self,
4383 language_server_id: Option<LanguageServerId>,
4384 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4385 let mut groups = Vec::new();
4386
4387 if let Some(language_server_id) = language_server_id {
4388 if let Ok(ix) = self
4389 .diagnostics
4390 .binary_search_by_key(&language_server_id, |e| e.0)
4391 {
4392 self.diagnostics[ix]
4393 .1
4394 .groups(language_server_id, &mut groups, self);
4395 }
4396 } else {
4397 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4398 diagnostics.groups(*language_server_id, &mut groups, self);
4399 }
4400 }
4401
4402 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4403 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4404 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4405 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4406 });
4407
4408 groups
4409 }
4410
4411 /// Returns an iterator over the diagnostics for the given group.
4412 pub fn diagnostic_group<O>(
4413 &self,
4414 group_id: usize,
4415 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4416 where
4417 O: FromAnchor + 'static,
4418 {
4419 self.diagnostics
4420 .iter()
4421 .flat_map(move |(_, set)| set.group(group_id, self))
4422 }
4423
4424 /// An integer version number that accounts for all updates besides
4425 /// the buffer's text itself (which is versioned via a version vector).
4426 pub fn non_text_state_update_count(&self) -> usize {
4427 self.non_text_state_update_count
4428 }
4429
4430 /// An integer version that changes when the buffer's syntax changes.
4431 pub fn syntax_update_count(&self) -> usize {
4432 self.syntax.update_count()
4433 }
4434
    /// Returns a snapshot of the underlying file.
4436 pub fn file(&self) -> Option<&Arc<dyn File>> {
4437 self.file.as_ref()
4438 }
4439
    /// Resolves the path of the underlying file, including the worktree root name when `include_root` is true or the path has no file name.
4441 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4442 if let Some(file) = self.file() {
4443 if file.path().file_name().is_none() || include_root {
4444 Some(file.full_path(cx))
4445 } else {
4446 Some(file.path().to_path_buf())
4447 }
4448 } else {
4449 None
4450 }
4451 }
4452
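    /// Collects the distinct words in the given buffer offset range, optionally
    /// filtered to those fuzzily matching `fuzzy_contents`.
    ///
    /// A rough usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```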
4453 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4454 let query_str = query.fuzzy_contents;
4455 if query_str.is_some_and(|query| query.is_empty()) {
4456 return BTreeMap::default();
4457 }
4458
4459 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4460 language,
4461 override_id: None,
4462 }));
4463
4464 let mut query_ix = 0;
4465 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4466 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4467
4468 let mut words = BTreeMap::default();
4469 let mut current_word_start_ix = None;
4470 let mut chunk_ix = query.range.start;
4471 for chunk in self.chunks(query.range, false) {
4472 for (i, c) in chunk.text.char_indices() {
4473 let ix = chunk_ix + i;
4474 if classifier.is_word(c) {
4475 if current_word_start_ix.is_none() {
4476 current_word_start_ix = Some(ix);
4477 }
4478
4479 if let Some(query_chars) = &query_chars
4480 && query_ix < query_len
4481 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4482 {
4483 query_ix += 1;
4484 }
4485 continue;
4486 } else if let Some(word_start) = current_word_start_ix.take()
4487 && query_ix == query_len
4488 {
4489 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4490 let mut word_text = self.text_for_range(word_start..ix).peekable();
4491 let first_char = word_text
4492 .peek()
4493 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and words that start with a digit.
4495 if !query.skip_digits
4496 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4497 {
4498 words.insert(word_text.collect(), word_range);
4499 }
4500 }
4501 query_ix = 0;
4502 }
4503 chunk_ix += chunk.text.len();
4504 }
4505
4506 words
4507 }
4508}
4509
4510pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the fuzzy string's characters, in order (case-insensitive).
4512 pub fuzzy_contents: Option<&'a str>,
4513 /// Skips words that start with a digit.
4514 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4516 pub range: Range<usize>,
4517}
4518
4519fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4520 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4521}
4522
4523fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4524 let mut result = IndentSize::spaces(0);
4525 for c in text {
4526 let kind = match c {
4527 ' ' => IndentKind::Space,
4528 '\t' => IndentKind::Tab,
4529 _ => break,
4530 };
4531 if result.len == 0 {
4532 result.kind = kind;
4533 }
4534 result.len += 1;
4535 }
4536 result
4537}
4538
4539impl Clone for BufferSnapshot {
4540 fn clone(&self) -> Self {
4541 Self {
4542 text: self.text.clone(),
4543 syntax: self.syntax.clone(),
4544 file: self.file.clone(),
4545 remote_selections: self.remote_selections.clone(),
4546 diagnostics: self.diagnostics.clone(),
4547 language: self.language.clone(),
4548 non_text_state_update_count: self.non_text_state_update_count,
4549 }
4550 }
4551}
4552
4553impl Deref for BufferSnapshot {
4554 type Target = text::BufferSnapshot;
4555
4556 fn deref(&self) -> &Self::Target {
4557 &self.text
4558 }
4559}
4560
4561unsafe impl Send for BufferChunks<'_> {}
4562
4563impl<'a> BufferChunks<'a> {
4564 pub(crate) fn new(
4565 text: &'a Rope,
4566 range: Range<usize>,
4567 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4568 diagnostics: bool,
4569 buffer_snapshot: Option<&'a BufferSnapshot>,
4570 ) -> Self {
4571 let mut highlights = None;
4572 if let Some((captures, highlight_maps)) = syntax {
4573 highlights = Some(BufferChunkHighlights {
4574 captures,
4575 next_capture: None,
4576 stack: Default::default(),
4577 highlight_maps,
4578 })
4579 }
4580
4581 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4582 let chunks = text.chunks_in_range(range.clone());
4583
4584 let mut this = BufferChunks {
4585 range,
4586 buffer_snapshot,
4587 chunks,
4588 diagnostic_endpoints,
4589 error_depth: 0,
4590 warning_depth: 0,
4591 information_depth: 0,
4592 hint_depth: 0,
4593 unnecessary_depth: 0,
4594 underline: true,
4595 highlights,
4596 };
4597 this.initialize_diagnostic_endpoints();
4598 this
4599 }
4600
    /// Seeks to the given byte range in the buffer.
4602 pub fn seek(&mut self, range: Range<usize>) {
4603 let old_range = std::mem::replace(&mut self.range, range.clone());
4604 self.chunks.set_range(self.range.clone());
4605 if let Some(highlights) = self.highlights.as_mut() {
4606 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4607 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4608 highlights
4609 .stack
4610 .retain(|(end_offset, _)| *end_offset > range.start);
4611 if let Some(capture) = &highlights.next_capture
4612 && range.start >= capture.node.start_byte()
4613 {
4614 let next_capture_end = capture.node.end_byte();
4615 if range.start < next_capture_end {
4616 highlights.stack.push((
4617 next_capture_end,
4618 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4619 ));
4620 }
4621 highlights.next_capture.take();
4622 }
4623 } else if let Some(snapshot) = self.buffer_snapshot {
4624 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4625 *highlights = BufferChunkHighlights {
4626 captures,
4627 next_capture: None,
4628 stack: Default::default(),
4629 highlight_maps,
4630 };
4631 } else {
4632 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4633 // Seeking such BufferChunks is not supported.
4634 debug_assert!(
4635 false,
4636 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4637 );
4638 }
4639
4640 highlights.captures.set_byte_range(self.range.clone());
4641 self.initialize_diagnostic_endpoints();
4642 }
4643 }
4644
4645 fn initialize_diagnostic_endpoints(&mut self) {
4646 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4647 && let Some(buffer) = self.buffer_snapshot
4648 {
4649 let mut diagnostic_endpoints = Vec::new();
4650 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4651 diagnostic_endpoints.push(DiagnosticEndpoint {
4652 offset: entry.range.start,
4653 is_start: true,
4654 severity: entry.diagnostic.severity,
4655 is_unnecessary: entry.diagnostic.is_unnecessary,
4656 underline: entry.diagnostic.underline,
4657 });
4658 diagnostic_endpoints.push(DiagnosticEndpoint {
4659 offset: entry.range.end,
4660 is_start: false,
4661 severity: entry.diagnostic.severity,
4662 is_unnecessary: entry.diagnostic.is_unnecessary,
4663 underline: entry.diagnostic.underline,
4664 });
4665 }
4666 diagnostic_endpoints
4667 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4668 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4669 self.hint_depth = 0;
4670 self.error_depth = 0;
4671 self.warning_depth = 0;
4672 self.information_depth = 0;
4673 }
4674 }
4675
4676 /// The current byte offset in the buffer.
4677 pub fn offset(&self) -> usize {
4678 self.range.start
4679 }
4680
4681 pub fn range(&self) -> Range<usize> {
4682 self.range.clone()
4683 }
4684
4685 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4686 let depth = match endpoint.severity {
4687 DiagnosticSeverity::ERROR => &mut self.error_depth,
4688 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4689 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4690 DiagnosticSeverity::HINT => &mut self.hint_depth,
4691 _ => return,
4692 };
4693 if endpoint.is_start {
4694 *depth += 1;
4695 } else {
4696 *depth -= 1;
4697 }
4698
4699 if endpoint.is_unnecessary {
4700 if endpoint.is_start {
4701 self.unnecessary_depth += 1;
4702 } else {
4703 self.unnecessary_depth -= 1;
4704 }
4705 }
4706 }
4707
4708 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4709 if self.error_depth > 0 {
4710 Some(DiagnosticSeverity::ERROR)
4711 } else if self.warning_depth > 0 {
4712 Some(DiagnosticSeverity::WARNING)
4713 } else if self.information_depth > 0 {
4714 Some(DiagnosticSeverity::INFORMATION)
4715 } else if self.hint_depth > 0 {
4716 Some(DiagnosticSeverity::HINT)
4717 } else {
4718 None
4719 }
4720 }
4721
4722 fn current_code_is_unnecessary(&self) -> bool {
4723 self.unnecessary_depth > 0
4724 }
4725}
4726
4727impl<'a> Iterator for BufferChunks<'a> {
4728 type Item = Chunk<'a>;
4729
4730 fn next(&mut self) -> Option<Self::Item> {
4731 let mut next_capture_start = usize::MAX;
4732 let mut next_diagnostic_endpoint = usize::MAX;
4733
4734 if let Some(highlights) = self.highlights.as_mut() {
4735 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4736 if *parent_capture_end <= self.range.start {
4737 highlights.stack.pop();
4738 } else {
4739 break;
4740 }
4741 }
4742
4743 if highlights.next_capture.is_none() {
4744 highlights.next_capture = highlights.captures.next();
4745 }
4746
4747 while let Some(capture) = highlights.next_capture.as_ref() {
4748 if self.range.start < capture.node.start_byte() {
4749 next_capture_start = capture.node.start_byte();
4750 break;
4751 } else {
4752 let highlight_id =
4753 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4754 highlights
4755 .stack
4756 .push((capture.node.end_byte(), highlight_id));
4757 highlights.next_capture = highlights.captures.next();
4758 }
4759 }
4760 }
4761
4762 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4763 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4764 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4765 if endpoint.offset <= self.range.start {
4766 self.update_diagnostic_depths(endpoint);
4767 diagnostic_endpoints.next();
4768 self.underline = endpoint.underline;
4769 } else {
4770 next_diagnostic_endpoint = endpoint.offset;
4771 break;
4772 }
4773 }
4774 }
4775 self.diagnostic_endpoints = diagnostic_endpoints;
4776
4777 if let Some(chunk) = self.chunks.peek() {
4778 let chunk_start = self.range.start;
4779 let mut chunk_end = (self.chunks.offset() + chunk.len())
4780 .min(next_capture_start)
4781 .min(next_diagnostic_endpoint);
4782 let mut highlight_id = None;
4783 if let Some(highlights) = self.highlights.as_ref()
4784 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4785 {
4786 chunk_end = chunk_end.min(*parent_capture_end);
4787 highlight_id = Some(*parent_highlight_id);
4788 }
4789
4790 let slice =
4791 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4792 self.range.start = chunk_end;
4793 if self.range.start == self.chunks.offset() + chunk.len() {
4794 self.chunks.next().unwrap();
4795 }
4796
4797 Some(Chunk {
4798 text: slice,
4799 syntax_highlight_id: highlight_id,
4800 underline: self.underline,
4801 diagnostic_severity: self.current_diagnostic_severity(),
4802 is_unnecessary: self.current_code_is_unnecessary(),
4803 ..Chunk::default()
4804 })
4805 } else {
4806 None
4807 }
4808 }
4809}
4810
4811impl operation_queue::Operation for Operation {
4812 fn lamport_timestamp(&self) -> clock::Lamport {
4813 match self {
4814 Operation::Buffer(_) => {
4815 unreachable!("buffer operations should never be deferred at this layer")
4816 }
4817 Operation::UpdateDiagnostics {
4818 lamport_timestamp, ..
4819 }
4820 | Operation::UpdateSelections {
4821 lamport_timestamp, ..
4822 }
4823 | Operation::UpdateCompletionTriggers {
4824 lamport_timestamp, ..
4825 } => *lamport_timestamp,
4826 }
4827 }
4828}
4829
4830impl Default for Diagnostic {
4831 fn default() -> Self {
4832 Self {
4833 source: Default::default(),
4834 source_kind: DiagnosticSourceKind::Other,
4835 code: None,
4836 code_description: None,
4837 severity: DiagnosticSeverity::ERROR,
4838 message: Default::default(),
4839 markdown: None,
4840 group_id: 0,
4841 is_primary: false,
4842 is_disk_based: false,
4843 is_unnecessary: false,
4844 underline: true,
4845 data: None,
4846 }
4847 }
4848}
4849
4850impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4852 pub fn spaces(len: u32) -> Self {
4853 Self {
4854 len,
4855 kind: IndentKind::Space,
4856 }
4857 }
4858
4859 /// Returns an [`IndentSize`] representing a tab.
4860 pub fn tab() -> Self {
4861 Self {
4862 len: 1,
4863 kind: IndentKind::Tab,
4864 }
4865 }
4866
4867 /// An iterator over the characters represented by this [`IndentSize`].
4868 pub fn chars(&self) -> impl Iterator<Item = char> {
4869 iter::repeat(self.char()).take(self.len as usize)
4870 }
4871
4872 /// The character representation of this [`IndentSize`].
4873 pub fn char(&self) -> char {
4874 match self.kind {
4875 IndentKind::Space => ' ',
4876 IndentKind::Tab => '\t',
4877 }
4878 }
4879
4880 /// Consumes the current [`IndentSize`] and returns a new one that has
4881 /// been shrunk or enlarged by the given size along the given direction.
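    ///
    /// A rough sketch of the behavior (not a doctest; assumes the `len` field is accessible):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// ```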
4882 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4883 match direction {
4884 Ordering::Less => {
4885 if self.kind == size.kind && self.len >= size.len {
4886 self.len -= size.len;
4887 }
4888 }
4889 Ordering::Equal => {}
4890 Ordering::Greater => {
4891 if self.len == 0 {
4892 self = size;
4893 } else if self.kind == size.kind {
4894 self.len += size.len;
4895 }
4896 }
4897 }
4898 self
4899 }
4900
4901 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4902 match self.kind {
4903 IndentKind::Space => self.len as usize,
4904 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4905 }
4906 }
4907}
4908
4909#[cfg(any(test, feature = "test-support"))]
4910pub struct TestFile {
4911 pub path: Arc<Path>,
4912 pub root_name: String,
4913 pub local_root: Option<PathBuf>,
4914}
4915
4916#[cfg(any(test, feature = "test-support"))]
4917impl File for TestFile {
4918 fn path(&self) -> &Arc<Path> {
4919 &self.path
4920 }
4921
4922 fn full_path(&self, _: &gpui::App) -> PathBuf {
4923 PathBuf::from(&self.root_name).join(self.path.as_ref())
4924 }
4925
4926 fn as_local(&self) -> Option<&dyn LocalFile> {
4927 if self.local_root.is_some() {
4928 Some(self)
4929 } else {
4930 None
4931 }
4932 }
4933
4934 fn disk_state(&self) -> DiskState {
4935 unimplemented!()
4936 }
4937
4938 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4939 self.path().file_name().unwrap_or(self.root_name.as_ref())
4940 }
4941
4942 fn worktree_id(&self, _: &App) -> WorktreeId {
4943 WorktreeId::from_usize(0)
4944 }
4945
4946 fn to_proto(&self, _: &App) -> rpc::proto::File {
4947 unimplemented!()
4948 }
4949
4950 fn is_private(&self) -> bool {
4951 false
4952 }
4953}
4954
4955#[cfg(any(test, feature = "test-support"))]
4956impl LocalFile for TestFile {
4957 fn abs_path(&self, _cx: &App) -> PathBuf {
4958 PathBuf::from(self.local_root.as_ref().unwrap())
4959 .join(&self.root_name)
4960 .join(self.path.as_ref())
4961 }
4962
4963 fn load(&self, _cx: &App) -> Task<Result<String>> {
4964 unimplemented!()
4965 }
4966
4967 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4968 unimplemented!()
4969 }
4970}
4971
4972pub(crate) fn contiguous_ranges(
4973 values: impl Iterator<Item = u32>,
4974 max_len: usize,
4975) -> impl Iterator<Item = Range<u32>> {
4976 let mut values = values;
4977 let mut current_range: Option<Range<u32>> = None;
4978 std::iter::from_fn(move || {
4979 loop {
4980 if let Some(value) = values.next() {
4981 if let Some(range) = &mut current_range
4982 && value == range.end
4983 && range.len() < max_len
4984 {
4985 range.end += 1;
4986 continue;
4987 }
4988
4989 let prev_range = current_range.clone();
4990 current_range = Some(value..(value + 1));
4991 if prev_range.is_some() {
4992 return prev_range;
4993 }
4994 } else {
4995 return current_range.take();
4996 }
4997 }
4998 })
4999}
5000
5001#[derive(Default, Debug)]
5002pub struct CharClassifier {
5003 scope: Option<LanguageScope>,
5004 for_completion: bool,
5005 ignore_punctuation: bool,
5006}
5007
5008impl CharClassifier {
5009 pub fn new(scope: Option<LanguageScope>) -> Self {
5010 Self {
5011 scope,
5012 for_completion: false,
5013 ignore_punctuation: false,
5014 }
5015 }
5016
5017 pub fn for_completion(self, for_completion: bool) -> Self {
5018 Self {
5019 for_completion,
5020 ..self
5021 }
5022 }
5023
5024 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5025 Self {
5026 ignore_punctuation,
5027 ..self
5028 }
5029 }
5030
5031 pub fn is_whitespace(&self, c: char) -> bool {
5032 self.kind(c) == CharKind::Whitespace
5033 }
5034
5035 pub fn is_word(&self, c: char) -> bool {
5036 self.kind(c) == CharKind::Word
5037 }
5038
5039 pub fn is_punctuation(&self, c: char) -> bool {
5040 self.kind(c) == CharKind::Punctuation
5041 }
5042
5043 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5044 if c.is_alphanumeric() || c == '_' {
5045 return CharKind::Word;
5046 }
5047
5048 if let Some(scope) = &self.scope {
5049 let characters = if self.for_completion {
5050 scope.completion_query_characters()
5051 } else {
5052 scope.word_characters()
5053 };
5054 if let Some(characters) = characters
5055 && characters.contains(&c)
5056 {
5057 return CharKind::Word;
5058 }
5059 }
5060
5061 if c.is_whitespace() {
5062 return CharKind::Whitespace;
5063 }
5064
5065 if ignore_punctuation {
5066 CharKind::Word
5067 } else {
5068 CharKind::Punctuation
5069 }
5070 }
5071
5072 pub fn kind(&self, c: char) -> CharKind {
5073 self.kind_with(c, self.ignore_punctuation)
5074 }
5075}
5076
5077/// Find all of the ranges of whitespace that occur at the ends of lines
5078/// in the given rope.
5079///
5080/// This could also be done with a regex search, but this implementation
5081/// avoids copying text.
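///
/// A rough usage sketch (not a doctest; assumes `Rope: From<&str>`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // `ranges` covers the spaces after the brace and the tabs on the second line
/// ```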
5082pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5083 let mut ranges = Vec::new();
5084
5085 let mut offset = 0;
5086 let mut prev_chunk_trailing_whitespace_range = 0..0;
5087 for chunk in rope.chunks() {
5088 let mut prev_line_trailing_whitespace_range = 0..0;
5089 for (i, line) in chunk.split('\n').enumerate() {
5090 let line_end_offset = offset + line.len();
5091 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5092 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5093
5094 if i == 0 && trimmed_line_len == 0 {
5095 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5096 }
5097 if !prev_line_trailing_whitespace_range.is_empty() {
5098 ranges.push(prev_line_trailing_whitespace_range);
5099 }
5100
5101 offset = line_end_offset + 1;
5102 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5103 }
5104
5105 offset -= 1;
5106 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5107 }
5108
5109 if !prev_chunk_trailing_whitespace_range.is_empty() {
5110 ranges.push(prev_chunk_trailing_whitespace_range);
5111 }
5112
5113 ranges
5114}