1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
    /// An optional URL pointing to documentation for this diagnostic's code.
    pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
233 pub source_kind: DiagnosticSourceKind,
234 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer needs to be reloaded.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
    /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
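///
/// A minimal sketch of how these modes are typically passed to [`Buffer::edit`]
/// (not compiled as a doctest; `copied_text` and the edit ranges are illustrative,
/// and an existing `buffer: &mut Buffer` and `cx: &mut Context<Buffer>` are assumed):
///
/// ```ignore
/// // Recompute the indentation of each inserted line independently.
/// buffer.edit(
///     [(Point::new(2, 0)..Point::new(2, 0), "if x {\n    y();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal relative indentation.
/// buffer.edit(
///     [(Point::new(5, 0)..Point::new(5, 0), copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![Some(4)] }),
///     cx,
/// );
/// ```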
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
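        ///
        /// For example, if the copied block's first line started at column 4 and
        /// its second line at column 8, and the first line is auto-indented to
        /// column 8 on insertion, the second line is shifted by `8 - 4 = 4`
        /// columns and ends up at column 12.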
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
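///
/// A rough usage sketch (not compiled as a doctest; assumes a `snapshot:
/// BufferSnapshot` whose `chunks(range, language_aware)` method is used to
/// construct this iterator):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries its text along with optional highlight and diagnostic info.
///     if let Some(highlight_id) = chunk.syntax_highlight_id {
///         println!("{:?}: {:?}", highlight_id, chunk.text);
///     }
/// }
/// ```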
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text is an inlay.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
/// A runnable is a set of data about a region that can be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line of the text, with leading whitespace trimmed unless
    /// a highlight starts within it, along with a boolean indicating whether any
    /// lines follow it.
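    ///
    /// A rough sketch of the expected behavior (not compiled as a doctest):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more); // a second line follows
    /// ```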
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
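    ///
    /// A minimal creation sketch (not compiled as a doctest; assumes a GPUI
    /// `cx: &mut App` is in scope):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```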
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
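    ///
    /// A sketch of reading from a snapshot off the main thread (not compiled as a
    /// doctest; assumes `buffer: &Buffer` and a GPUI `cx: &App`):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read freely on this thread.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     println!("buffer has {line_count} lines");
    /// })
    /// .detach();
    /// ```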
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
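    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer.
    ///
    /// A rough usage sketch (not compiled as a doctest; assumes `buffer: &Buffer`,
    /// `edits: Arc<[(Range<Anchor>, String)]>`, and a GPUI `cx`; the returned task
    /// is awaited in an async context before rendering):
    ///
    /// ```ignore
    /// let current_snapshot = buffer.snapshot();
    /// let preview_task = buffer.preview_edits(edits.clone(), cx);
    /// // Later, in an async context:
    /// let preview = preview_task.await;
    /// let highlighted = preview.highlight_edits(&current_snapshot, &edits, true, cx);
    /// ```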
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
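    ///
    /// A minimal sketch (not compiled as a doctest; assumes `branch: Entity<Buffer>`
    /// was created via [`Buffer::branch`] and that `cx` can update entities):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```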
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation
1162 && let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1188 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1189 {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193
1194 self.apply_ops([operation.clone()], cx);
1195
1196 if let Some(timestamp) = operation_to_undo {
1197 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1198 self.undo_operations(counts, cx);
1199 }
1200 }
1201
1202 #[cfg(test)]
1203 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1204 &self.text
1205 }
1206
1207 /// Retrieve a snapshot of the buffer's raw text, without any
1208 /// language-related state like the syntax tree or diagnostics.
1209 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1210 self.text.snapshot()
1211 }
1212
1213 /// The file associated with the buffer, if any.
1214 pub fn file(&self) -> Option<&Arc<dyn File>> {
1215 self.file.as_ref()
1216 }
1217
1218 /// The version of the buffer that was last saved or reloaded from disk.
1219 pub fn saved_version(&self) -> &clock::Global {
1220 &self.saved_version
1221 }
1222
1223 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1224 pub fn saved_mtime(&self) -> Option<MTime> {
1225 self.saved_mtime
1226 }
1227
1228 /// Assign a language to the buffer.
1229 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1230 self.non_text_state_update_count += 1;
1231 self.syntax_map.lock().clear(&self.text);
1232 self.language = language;
1233 self.was_changed();
1234 self.reparse(cx);
1235 cx.emit(BufferEvent::LanguageChanged);
1236 }
1237
1238 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1239 /// other languages if parts of the buffer are written in different languages.
1240 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1241 self.syntax_map
1242 .lock()
1243 .set_language_registry(language_registry);
1244 }
1245
1246 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1247 self.syntax_map.lock().language_registry()
1248 }
1249
1250 /// Assign the buffer a new [`Capability`].
1251 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1252 self.capability = capability;
1253 cx.emit(BufferEvent::CapabilityChanged)
1254 }
1255
1256 /// This method is called to signal that the buffer has been saved.
1257 pub fn did_save(
1258 &mut self,
1259 version: clock::Global,
1260 mtime: Option<MTime>,
1261 cx: &mut Context<Self>,
1262 ) {
1263 self.saved_version = version;
1264 self.has_unsaved_edits
1265 .set((self.saved_version().clone(), false));
1266 self.has_conflict = false;
1267 self.saved_mtime = mtime;
1268 self.was_changed();
1269 cx.emit(BufferEvent::Saved);
1270 cx.notify();
1271 }
1272
1273 /// This method is called to signal that the buffer has been discarded.
1274 pub fn discarded(&self, cx: &mut Context<Self>) {
1275 cx.emit(BufferEvent::Discarded);
1276 cx.notify();
1277 }
1278
1279 /// Reloads the contents of the buffer from disk.
1280 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1281 let (tx, rx) = futures::channel::oneshot::channel();
1282 let prev_version = self.text.version();
1283 self.reload_task = Some(cx.spawn(async move |this, cx| {
1284 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1285 let file = this.file.as_ref()?.as_local()?;
1286
1287 Some((file.disk_state().mtime(), file.load(cx)))
1288 })?
1289 else {
1290 return Ok(());
1291 };
1292
1293 let new_text = new_text.await?;
1294 let diff = this
1295 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1296 .await;
1297 this.update(cx, |this, cx| {
1298 if this.version() == diff.base_version {
1299 this.finalize_last_transaction();
1300 this.apply_diff(diff, cx);
1301 tx.send(this.finalize_last_transaction().cloned()).ok();
1302 this.has_conflict = false;
1303 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1304 } else {
1305 if !diff.edits.is_empty()
1306 || this
1307 .edits_since::<usize>(&diff.base_version)
1308 .next()
1309 .is_some()
1310 {
1311 this.has_conflict = true;
1312 }
1313
1314 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1315 }
1316
1317 this.reload_task.take();
1318 })
1319 }));
1320 rx
1321 }
1322
1323 /// This method is called to signal that the buffer has been reloaded.
1324 pub fn did_reload(
1325 &mut self,
1326 version: clock::Global,
1327 line_ending: LineEnding,
1328 mtime: Option<MTime>,
1329 cx: &mut Context<Self>,
1330 ) {
1331 self.saved_version = version;
1332 self.has_unsaved_edits
1333 .set((self.saved_version.clone(), false));
1334 self.text.set_line_ending(line_ending);
1335 self.saved_mtime = mtime;
1336 cx.emit(BufferEvent::Reloaded);
1337 cx.notify();
1338 }
1339
1340 /// Updates the [`File`] backing this buffer. This should be called when
1341 /// the file has changed or has been deleted.
1342 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1343 let was_dirty = self.is_dirty();
1344 let mut file_changed = false;
1345
1346 if let Some(old_file) = self.file.as_ref() {
1347 if new_file.path() != old_file.path() {
1348 file_changed = true;
1349 }
1350
1351 let old_state = old_file.disk_state();
1352 let new_state = new_file.disk_state();
1353 if old_state != new_state {
1354 file_changed = true;
1355 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1356 cx.emit(BufferEvent::ReloadNeeded)
1357 }
1358 }
1359 } else {
1360 file_changed = true;
1361 };
1362
1363 self.file = Some(new_file);
1364 if file_changed {
1365 self.was_changed();
1366 self.non_text_state_update_count += 1;
1367 if was_dirty != self.is_dirty() {
1368 cx.emit(BufferEvent::DirtyChanged);
1369 }
1370 cx.emit(BufferEvent::FileHandleChanged);
1371 cx.notify();
1372 }
1373 }
1374
1375 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1376 Some(self.branch_state.as_ref()?.base_buffer.clone())
1377 }
1378
1379 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1380 pub fn language(&self) -> Option<&Arc<Language>> {
1381 self.language.as_ref()
1382 }
1383
1384 /// Returns the [`Language`] at the given location.
1385 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1386 let offset = position.to_offset(self);
1387 let mut is_first = true;
1388 let start_anchor = self.anchor_before(offset);
1389 let end_anchor = self.anchor_after(offset);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .filter(|layer| {
1394 if is_first {
1395 is_first = false;
1396 return true;
1397 }
1398
1399 layer
1400 .included_sub_ranges
1401 .map(|sub_ranges| {
1402 sub_ranges.iter().any(|sub_range| {
1403 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1404 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1405 !is_before_start && !is_after_end
1406 })
1407 })
1408 .unwrap_or(true)
1409 })
1410 .last()
1411 .map(|info| info.language.clone())
1412 .or_else(|| self.language.clone())
1413 }
1414
1415 /// Returns each [`Language`] for the active syntax layers at the given location.
1416 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1417 let offset = position.to_offset(self);
1418 let mut languages: Vec<Arc<Language>> = self
1419 .syntax_map
1420 .lock()
1421 .layers_for_range(offset..offset, &self.text, false)
1422 .map(|info| info.language.clone())
1423 .collect();
1424
1425 if languages.is_empty()
1426 && let Some(buffer_language) = self.language()
1427 {
1428 languages.push(buffer_language.clone());
1429 }
1430
1431 languages
1432 }
1433
1434 /// An integer version number that accounts for all updates besides
1435 /// the buffer's text itself (which is versioned via a version vector).
1436 pub fn non_text_state_update_count(&self) -> usize {
1437 self.non_text_state_update_count
1438 }
1439
1440 /// Whether the buffer is being parsed in the background.
1441 #[cfg(any(test, feature = "test-support"))]
1442 pub fn is_parsing(&self) -> bool {
1443 self.reparse.is_some()
1444 }
1445
1446 /// Indicates whether the buffer contains any regions that may be
1447 /// written in a language that hasn't been loaded yet.
1448 pub fn contains_unknown_injections(&self) -> bool {
1449 self.syntax_map.lock().contains_unknown_injections()
1450 }
1451
1452 #[cfg(any(test, feature = "test-support"))]
1453 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1454 self.sync_parse_timeout = timeout;
1455 }
1456
1457 /// Called after an edit to synchronize the buffer's main parse tree with
1458 /// the buffer's new underlying state.
1459 ///
1460 /// Locks the syntax map and interpolates the edits since the last reparse
1461 /// into the foreground syntax tree.
1462 ///
1463 /// Then takes a stable snapshot of the syntax map before unlocking it.
1464 /// The snapshot with the interpolated edits is sent to a background thread,
1465 /// where we ask Tree-sitter to perform an incremental parse.
1466 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for that parse to complete. If it finishes within the timeout,
    /// we apply the result synchronously.
    ///
    /// If we time out waiting for the parse, we return immediately with the
    /// interpolated tree still in the foreground, and spawn a second task that
    /// waits for the parse to finish. When the background parse completes, it
    /// calls back into the main thread and assigns the new parse state there.
1475 ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we recursively initiate another reparse. To avoid concurrent parses
1478 /// for the same buffer, we only initiate a new parse if we are not already
1479 /// parsing in the background.
1480 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1481 if self.reparse.is_some() {
1482 return;
1483 }
1484 let language = if let Some(language) = self.language.clone() {
1485 language
1486 } else {
1487 return;
1488 };
1489
1490 let text = self.text_snapshot();
1491 let parsed_version = self.version();
1492
1493 let mut syntax_map = self.syntax_map.lock();
1494 syntax_map.interpolate(&text);
1495 let language_registry = syntax_map.language_registry();
1496 let mut syntax_snapshot = syntax_map.snapshot();
1497 drop(syntax_map);
1498
1499 let parse_task = cx.background_spawn({
1500 let language = language.clone();
1501 let language_registry = language_registry.clone();
1502 async move {
1503 syntax_snapshot.reparse(&text, language_registry, language);
1504 syntax_snapshot
1505 }
1506 });
1507
1508 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1509 match cx
1510 .background_executor()
1511 .block_with_timeout(self.sync_parse_timeout, parse_task)
1512 {
1513 Ok(new_syntax_snapshot) => {
1514 self.did_finish_parsing(new_syntax_snapshot, cx);
1515 self.reparse = None;
1516 }
1517 Err(parse_task) => {
1518 self.reparse = Some(cx.spawn(async move |this, cx| {
1519 let new_syntax_map = parse_task.await;
1520 this.update(cx, move |this, cx| {
1521 let grammar_changed =
1522 this.language.as_ref().is_none_or(|current_language| {
1523 !Arc::ptr_eq(&language, current_language)
1524 });
1525 let language_registry_changed = new_syntax_map
1526 .contains_unknown_injections()
1527 && language_registry.is_some_and(|registry| {
1528 registry.version() != new_syntax_map.language_registry_version()
1529 });
1530 let parse_again = language_registry_changed
1531 || grammar_changed
1532 || this.version.changed_since(&parsed_version);
1533 this.did_finish_parsing(new_syntax_map, cx);
1534 this.reparse = None;
1535 if parse_again {
1536 this.reparse(cx);
1537 }
1538 })
1539 .ok();
1540 }));
1541 }
1542 }
1543 }
1544
1545 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1546 self.was_changed();
1547 self.non_text_state_update_count += 1;
1548 self.syntax_map.lock().did_parse(syntax_snapshot);
1549 self.request_autoindent(cx);
1550 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1551 cx.emit(BufferEvent::Reparsed);
1552 cx.notify();
1553 }
1554
1555 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1556 self.parse_status.1.clone()
1557 }
1558
1559 /// Assign to the buffer a set of diagnostics created by a given language server.
1560 pub fn update_diagnostics(
1561 &mut self,
1562 server_id: LanguageServerId,
1563 diagnostics: DiagnosticSet,
1564 cx: &mut Context<Self>,
1565 ) {
1566 let lamport_timestamp = self.text.lamport_clock.tick();
1567 let op = Operation::UpdateDiagnostics {
1568 server_id,
1569 diagnostics: diagnostics.iter().cloned().collect(),
1570 lamport_timestamp,
1571 };
1572
1573 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1574 self.send_operation(op, true, cx);
1575 }
1576
1577 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1578 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1579 return None;
1580 };
1581 Some(&self.diagnostics[idx].1)
1582 }
1583
1584 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1585 if let Some(indent_sizes) = self.compute_autoindents() {
1586 let indent_sizes = cx.background_spawn(indent_sizes);
1587 match cx
1588 .background_executor()
1589 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1590 {
1591 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1592 Err(indent_sizes) => {
1593 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1594 let indent_sizes = indent_sizes.await;
1595 this.update(cx, |this, cx| {
1596 this.apply_autoindents(indent_sizes, cx);
1597 })
1598 .ok();
1599 }));
1600 }
1601 }
1602 } else {
1603 self.autoindent_requests.clear();
1604 for tx in self.wait_for_autoindent_txs.drain(..) {
1605 tx.send(()).ok();
1606 }
1607 }
1608 }
1609
1610 fn compute_autoindents(
1611 &self,
1612 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1613 let max_rows_between_yields = 100;
1614 let snapshot = self.snapshot();
1615 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1616 return None;
1617 }
1618
1619 let autoindent_requests = self.autoindent_requests.clone();
1620 Some(async move {
1621 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1622 for request in autoindent_requests {
1623 // Resolve each edited range to its row in the current buffer and in the
1624 // buffer before this batch of edits.
1625 let mut row_ranges = Vec::new();
1626 let mut old_to_new_rows = BTreeMap::new();
1627 let mut language_indent_sizes_by_new_row = Vec::new();
1628 for entry in &request.entries {
1629 let position = entry.range.start;
1630 let new_row = position.to_point(&snapshot).row;
1631 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1632 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1633
1634 if !entry.first_line_is_new {
1635 let old_row = position.to_point(&request.before_edit).row;
1636 old_to_new_rows.insert(old_row, new_row);
1637 }
1638 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1639 }
1640
1641 // Build a map containing the suggested indentation for each of the edited lines
1642 // with respect to the state of the buffer before these edits. This map is keyed
1643 // by the rows for these lines in the current state of the buffer.
1644 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1645 let old_edited_ranges =
1646 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1647 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1648 let mut language_indent_size = IndentSize::default();
1649 for old_edited_range in old_edited_ranges {
1650 let suggestions = request
1651 .before_edit
1652 .suggest_autoindents(old_edited_range.clone())
1653 .into_iter()
1654 .flatten();
1655 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1656 if let Some(suggestion) = suggestion {
1657 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1658
1659 // Find the indent size based on the language for this row.
1660 while let Some((row, size)) = language_indent_sizes.peek() {
1661 if *row > new_row {
1662 break;
1663 }
1664 language_indent_size = *size;
1665 language_indent_sizes.next();
1666 }
1667
1668 let suggested_indent = old_to_new_rows
1669 .get(&suggestion.basis_row)
1670 .and_then(|from_row| {
1671 Some(old_suggestions.get(from_row).copied()?.0)
1672 })
1673 .unwrap_or_else(|| {
1674 request
1675 .before_edit
1676 .indent_size_for_line(suggestion.basis_row)
1677 })
1678 .with_delta(suggestion.delta, language_indent_size);
1679 old_suggestions
1680 .insert(new_row, (suggested_indent, suggestion.within_error));
1681 }
1682 }
1683 yield_now().await;
1684 }
1685
1686 // Compute new suggestions for each line, but only include them in the result
1687 // if they differ from the old suggestion for that line.
1688 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1689 let mut language_indent_size = IndentSize::default();
1690 for (row_range, original_indent_column) in row_ranges {
1691 let new_edited_row_range = if request.is_block_mode {
1692 row_range.start..row_range.start + 1
1693 } else {
1694 row_range.clone()
1695 };
1696
1697 let suggestions = snapshot
1698 .suggest_autoindents(new_edited_row_range.clone())
1699 .into_iter()
1700 .flatten();
1701 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1702 if let Some(suggestion) = suggestion {
1703 // Find the indent size based on the language for this row.
1704 while let Some((row, size)) = language_indent_sizes.peek() {
1705 if *row > new_row {
1706 break;
1707 }
1708 language_indent_size = *size;
1709 language_indent_sizes.next();
1710 }
1711
1712 let suggested_indent = indent_sizes
1713 .get(&suggestion.basis_row)
1714 .copied()
1715 .map(|e| e.0)
1716 .unwrap_or_else(|| {
1717 snapshot.indent_size_for_line(suggestion.basis_row)
1718 })
1719 .with_delta(suggestion.delta, language_indent_size);
1720
1721 if old_suggestions.get(&new_row).is_none_or(
1722 |(old_indentation, was_within_error)| {
1723 suggested_indent != *old_indentation
1724 && (!suggestion.within_error || *was_within_error)
1725 },
1726 ) {
1727 indent_sizes.insert(
1728 new_row,
1729 (suggested_indent, request.ignore_empty_lines),
1730 );
1731 }
1732 }
1733 }
1734
1735 if let (true, Some(original_indent_column)) =
1736 (request.is_block_mode, original_indent_column)
1737 {
1738 let new_indent =
1739 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1740 *indent
1741 } else {
1742 snapshot.indent_size_for_line(row_range.start)
1743 };
1744 let delta = new_indent.len as i64 - original_indent_column as i64;
1745 if delta != 0 {
1746 for row in row_range.skip(1) {
1747 indent_sizes.entry(row).or_insert_with(|| {
1748 let mut size = snapshot.indent_size_for_line(row);
1749 if size.kind == new_indent.kind {
1750 match delta.cmp(&0) {
1751 Ordering::Greater => size.len += delta as u32,
1752 Ordering::Less => {
1753 size.len = size.len.saturating_sub(-delta as u32)
1754 }
1755 Ordering::Equal => {}
1756 }
1757 }
1758 (size, request.ignore_empty_lines)
1759 });
1760 }
1761 }
1762 }
1763
1764 yield_now().await;
1765 }
1766 }
1767
1768 indent_sizes
1769 .into_iter()
1770 .filter_map(|(row, (indent, ignore_empty_lines))| {
1771 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1772 None
1773 } else {
1774 Some((row, indent))
1775 }
1776 })
1777 .collect()
1778 })
1779 }
1780
1781 fn apply_autoindents(
1782 &mut self,
1783 indent_sizes: BTreeMap<u32, IndentSize>,
1784 cx: &mut Context<Self>,
1785 ) {
1786 self.autoindent_requests.clear();
1787 for tx in self.wait_for_autoindent_txs.drain(..) {
1788 tx.send(()).ok();
1789 }
1790
1791 let edits: Vec<_> = indent_sizes
1792 .into_iter()
1793 .filter_map(|(row, indent_size)| {
1794 let current_size = indent_size_for_line(self, row);
1795 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1796 })
1797 .collect();
1798
1799 let preserve_preview = self.preserve_preview();
1800 self.edit(edits, None, cx);
1801 if preserve_preview {
1802 self.refresh_preview();
1803 }
1804 }
1805
1806 /// Create a minimal edit that will cause the given row to be indented
1807 /// with the given size. After applying this edit, the length of the line
1808 /// will always be at least `new_size.len`.
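    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doc-test) of the edit produced when growing a
    /// two-space indent to four spaces; `IndentSize::spaces` is the constructor used
    /// elsewhere in this crate:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Two spaces are inserted at the start of row 3.
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```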
1809 pub fn edit_for_indent_size_adjustment(
1810 row: u32,
1811 current_size: IndentSize,
1812 new_size: IndentSize,
1813 ) -> Option<(Range<Point>, String)> {
1814 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1816 Ordering::Greater => {
1817 let point = Point::new(row, 0);
1818 Some((
1819 point..point,
1820 iter::repeat(new_size.char())
1821 .take((new_size.len - current_size.len) as usize)
1822 .collect::<String>(),
1823 ))
1824 }
1825
1826 Ordering::Less => Some((
1827 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1828 String::new(),
1829 )),
1830
1831 Ordering::Equal => None,
1832 }
1833 } else {
1834 Some((
1835 Point::new(row, 0)..Point::new(row, current_size.len),
1836 iter::repeat(new_size.char())
1837 .take(new_size.len as usize)
1838 .collect::<String>(),
1839 ))
1840 }
1841 }
1842
1843 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1844 /// and the given new text.
1845 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1846 let old_text = self.as_rope().clone();
1847 let base_version = self.version();
1848 cx.background_executor()
1849 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1850 let old_text = old_text.to_string();
1851 let line_ending = LineEnding::detect(&new_text);
1852 LineEnding::normalize(&mut new_text);
1853 let edits = text_diff(&old_text, &new_text);
1854 Diff {
1855 base_version,
1856 line_ending,
1857 edits,
1858 }
1859 })
1860 }
1861
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1864 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1865 let old_text = self.as_rope().clone();
1866 let line_ending = self.line_ending();
1867 let base_version = self.version();
1868 cx.background_spawn(async move {
1869 let ranges = trailing_whitespace_ranges(&old_text);
1870 let empty = Arc::<str>::from("");
1871 Diff {
1872 base_version,
1873 line_ending,
1874 edits: ranges
1875 .into_iter()
1876 .map(|range| (range, empty.clone()))
1877 .collect(),
1878 }
1879 })
1880 }
1881
1882 /// Ensures that the buffer ends with a single newline character, and
1883 /// no other whitespace. Skips if the buffer is empty.
1884 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1885 let len = self.len();
1886 if len == 0 {
1887 return;
1888 }
1889 let mut offset = len;
1890 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1891 let non_whitespace_len = chunk
1892 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1893 .len();
1894 offset -= chunk.len();
1895 offset += non_whitespace_len;
1896 if non_whitespace_len != 0 {
1897 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1898 return;
1899 }
1900 break;
1901 }
1902 }
1903 self.edit([(offset..len, "\n")], None, cx);
1904 }
1905
1906 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1907 /// calculated, then adjust the diff to account for those changes, and discard any
1908 /// parts of the diff that conflict with those changes.
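    ///
    /// A hedged sketch of the intended flow (assuming `buffer` is an `Entity<Buffer>`
    /// and this runs in an async context that can await the background diff task):
    ///
    /// ```ignore
    /// // Compute the diff in the background, then rebase and apply it.
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```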
1909 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1910 let snapshot = self.snapshot();
1911 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1912 let mut delta = 0;
1913 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1914 while let Some(edit_since) = edits_since.peek() {
1915 // If the edit occurs after a diff hunk, then it does not
1916 // affect that hunk.
1917 if edit_since.old.start > range.end {
1918 break;
1919 }
1920 // If the edit precedes the diff hunk, then adjust the hunk
1921 // to reflect the edit.
1922 else if edit_since.old.end < range.start {
1923 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1924 edits_since.next();
1925 }
1926 // If the edit intersects a diff hunk, then discard that hunk.
1927 else {
1928 return None;
1929 }
1930 }
1931
1932 let start = (range.start as i64 + delta) as usize;
1933 let end = (range.end as i64 + delta) as usize;
1934 Some((start..end, new_text))
1935 });
1936
1937 self.start_transaction();
1938 self.text.set_line_ending(diff.line_ending);
1939 self.edit(adjusted_edits, None, cx);
1940 self.end_transaction(cx)
1941 }
1942
1943 fn has_unsaved_edits(&self) -> bool {
1944 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1945
1946 if last_version == self.version {
1947 self.has_unsaved_edits
1948 .set((last_version, has_unsaved_edits));
1949 return has_unsaved_edits;
1950 }
1951
1952 let has_edits = self.has_edits_since(&self.saved_version);
1953 self.has_unsaved_edits
1954 .set((self.version.clone(), has_edits));
1955 has_edits
1956 }
1957
1958 /// Checks if the buffer has unsaved changes.
1959 pub fn is_dirty(&self) -> bool {
1960 if self.capability == Capability::ReadOnly {
1961 return false;
1962 }
1963 if self.has_conflict {
1964 return true;
1965 }
1966 match self.file.as_ref().map(|f| f.disk_state()) {
1967 Some(DiskState::New) | Some(DiskState::Deleted) => {
1968 !self.is_empty() && self.has_unsaved_edits()
1969 }
1970 _ => self.has_unsaved_edits(),
1971 }
1972 }
1973
1974 /// Checks if the buffer and its file have both changed since the buffer
1975 /// was last saved or reloaded.
1976 pub fn has_conflict(&self) -> bool {
1977 if self.has_conflict {
1978 return true;
1979 }
1980 let Some(file) = self.file.as_ref() else {
1981 return false;
1982 };
1983 match file.disk_state() {
1984 DiskState::New => false,
1985 DiskState::Present { mtime } => match self.saved_mtime {
1986 Some(saved_mtime) => {
1987 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1988 }
1989 None => true,
1990 },
1991 DiskState::Deleted => false,
1992 }
1993 }
1994
1995 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1996 pub fn subscribe(&mut self) -> Subscription {
1997 self.text.subscribe()
1998 }
1999
    /// Adds a bit to the list of bits that are set when the buffer's text changes.
    ///
    /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
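    ///
    /// A minimal sketch (not compiled as a doc-test); the strong `Rc` must be kept
    /// alive by the caller, since only a `Weak` reference is stored:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get()); // The bit is set as soon as the text changes.
    /// ```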
2004 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2005 if let Err(ix) = self
2006 .change_bits
2007 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2008 {
2009 self.change_bits.insert(ix, bit);
2010 }
2011 }
2012
2013 fn was_changed(&mut self) {
2014 self.change_bits.retain(|change_bit| {
2015 change_bit.upgrade().is_some_and(|bit| {
2016 bit.replace(true);
2017 true
2018 })
2019 });
2020 }
2021
2022 /// Starts a transaction, if one is not already in-progress. When undoing or
2023 /// redoing edits, all of the edits performed within a transaction are undone
2024 /// or redone together.
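    ///
    /// A minimal sketch (assuming `cx` is a `Context<Buffer>`): both edits below are
    /// grouped, so a single call to `undo` reverts them together:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// ```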
2025 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2026 self.start_transaction_at(Instant::now())
2027 }
2028
2029 /// Starts a transaction, providing the current time. Subsequent transactions
2030 /// that occur within a short period of time will be grouped together. This
2031 /// is controlled by the buffer's undo grouping duration.
2032 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2033 self.transaction_depth += 1;
2034 if self.was_dirty_before_starting_transaction.is_none() {
2035 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2036 }
2037 self.text.start_transaction_at(now)
2038 }
2039
2040 /// Terminates the current transaction, if this is the outermost transaction.
2041 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2042 self.end_transaction_at(Instant::now(), cx)
2043 }
2044
2045 /// Terminates the current transaction, providing the current time. Subsequent transactions
2046 /// that occur within a short period of time will be grouped together. This
2047 /// is controlled by the buffer's undo grouping duration.
2048 pub fn end_transaction_at(
2049 &mut self,
2050 now: Instant,
2051 cx: &mut Context<Self>,
2052 ) -> Option<TransactionId> {
2053 assert!(self.transaction_depth > 0);
2054 self.transaction_depth -= 1;
2055 let was_dirty = if self.transaction_depth == 0 {
2056 self.was_dirty_before_starting_transaction.take().unwrap()
2057 } else {
2058 false
2059 };
2060 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2061 self.did_edit(&start_version, was_dirty, cx);
2062 Some(transaction_id)
2063 } else {
2064 None
2065 }
2066 }
2067
2068 /// Manually add a transaction to the buffer's undo history.
2069 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2070 self.text.push_transaction(transaction, now);
2071 }
2072
2073 /// Differs from `push_transaction` in that it does not clear the redo
2074 /// stack. Intended to be used to create a parent transaction to merge
2075 /// potential child transactions into.
2076 ///
2077 /// The caller is responsible for removing it from the undo history using
2078 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2079 /// are merged into this transaction, the caller is responsible for ensuring
2080 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2081 /// cleared is to create transactions with the usual `start_transaction` and
2082 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
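    ///
    /// A sketch of that pattern (assuming `cx` is a `Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // No edits were made, so drop the empty parent transaction.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```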
2084 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2085 self.text.push_empty_transaction(now)
2086 }
2087
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2090 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2091 self.text.finalize_last_transaction()
2092 }
2093
2094 /// Manually group all changes since a given transaction.
2095 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2096 self.text.group_until_transaction(transaction_id);
2097 }
2098
2099 /// Manually remove a transaction from the buffer's undo history
2100 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2101 self.text.forget_transaction(transaction_id)
2102 }
2103
2104 /// Retrieve a transaction from the buffer's undo history
2105 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2106 self.text.get_transaction(transaction_id)
2107 }
2108
2109 /// Manually merge two transactions in the buffer's undo history.
2110 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2111 self.text.merge_transactions(transaction, destination);
2112 }
2113
2114 /// Waits for the buffer to receive operations with the given timestamps.
2115 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2116 &mut self,
2117 edit_ids: It,
2118 ) -> impl Future<Output = Result<()>> + use<It> {
2119 self.text.wait_for_edits(edit_ids)
2120 }
2121
2122 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2123 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2124 &mut self,
2125 anchors: It,
2126 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2127 self.text.wait_for_anchors(anchors)
2128 }
2129
2130 /// Waits for the buffer to receive operations up to the given version.
2131 pub fn wait_for_version(
2132 &mut self,
2133 version: clock::Global,
2134 ) -> impl Future<Output = Result<()>> + use<> {
2135 self.text.wait_for_version(version)
2136 }
2137
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2140 pub fn give_up_waiting(&mut self) {
2141 self.text.give_up_waiting();
2142 }
2143
2144 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2145 let mut rx = None;
2146 if !self.autoindent_requests.is_empty() {
2147 let channel = oneshot::channel();
2148 self.wait_for_autoindent_txs.push(channel.0);
2149 rx = Some(channel.1);
2150 }
2151 rx
2152 }
2153
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2155 pub fn set_active_selections(
2156 &mut self,
2157 selections: Arc<[Selection<Anchor>]>,
2158 line_mode: bool,
2159 cursor_shape: CursorShape,
2160 cx: &mut Context<Self>,
2161 ) {
2162 let lamport_timestamp = self.text.lamport_clock.tick();
2163 self.remote_selections.insert(
2164 self.text.replica_id(),
2165 SelectionSet {
2166 selections: selections.clone(),
2167 lamport_timestamp,
2168 line_mode,
2169 cursor_shape,
2170 },
2171 );
2172 self.send_operation(
2173 Operation::UpdateSelections {
2174 selections,
2175 line_mode,
2176 lamport_timestamp,
2177 cursor_shape,
2178 },
2179 true,
2180 cx,
2181 );
2182 self.non_text_state_update_count += 1;
2183 cx.notify();
2184 }
2185
2186 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2187 /// this replica.
2188 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2189 if self
2190 .remote_selections
2191 .get(&self.text.replica_id())
2192 .is_none_or(|set| !set.selections.is_empty())
2193 {
2194 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2195 }
2196 }
2197
2198 pub fn set_agent_selections(
2199 &mut self,
2200 selections: Arc<[Selection<Anchor>]>,
2201 line_mode: bool,
2202 cursor_shape: CursorShape,
2203 cx: &mut Context<Self>,
2204 ) {
2205 let lamport_timestamp = self.text.lamport_clock.tick();
2206 self.remote_selections.insert(
2207 AGENT_REPLICA_ID,
2208 SelectionSet {
2209 selections: selections.clone(),
2210 lamport_timestamp,
2211 line_mode,
2212 cursor_shape,
2213 },
2214 );
2215 self.non_text_state_update_count += 1;
2216 cx.notify();
2217 }
2218
2219 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2220 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2221 }
2222
2223 /// Replaces the buffer's entire text.
2224 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2225 where
2226 T: Into<Arc<str>>,
2227 {
2228 self.autoindent_requests.clear();
2229 self.edit([(0..self.len(), text)], None, cx)
2230 }
2231
2232 /// Appends the given text to the end of the buffer.
2233 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2234 where
2235 T: Into<Arc<str>>,
2236 {
2237 self.edit([(self.len()..self.len(), text)], None, cx)
2238 }
2239
2240 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2241 /// delete, and a string of text to insert at that location.
2242 ///
2243 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2244 /// request for the edited ranges, which will be processed when the buffer finishes
2245 /// parsing.
2246 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
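    ///
    /// A minimal sketch (assuming `cx` is a `Context<Buffer>`): replace a range and
    /// enqueue auto-indentation for every inserted line:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 4), "if ok {\n    run();\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```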
2249 pub fn edit<I, S, T>(
2250 &mut self,
2251 edits_iter: I,
2252 autoindent_mode: Option<AutoindentMode>,
2253 cx: &mut Context<Self>,
2254 ) -> Option<clock::Lamport>
2255 where
2256 I: IntoIterator<Item = (Range<S>, T)>,
2257 S: ToOffset,
2258 T: Into<Arc<str>>,
2259 {
2260 // Skip invalid edits and coalesce contiguous ones.
2261 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2262
2263 for (range, new_text) in edits_iter {
2264 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2265
2266 if range.start > range.end {
2267 mem::swap(&mut range.start, &mut range.end);
2268 }
2269 let new_text = new_text.into();
2270 if !new_text.is_empty() || !range.is_empty() {
2271 if let Some((prev_range, prev_text)) = edits.last_mut()
2272 && prev_range.end >= range.start
2273 {
2274 prev_range.end = cmp::max(prev_range.end, range.end);
2275 *prev_text = format!("{prev_text}{new_text}").into();
2276 } else {
2277 edits.push((range, new_text));
2278 }
2279 }
2280 }
2281 if edits.is_empty() {
2282 return None;
2283 }
2284
2285 self.start_transaction();
2286 self.pending_autoindent.take();
2287 let autoindent_request = autoindent_mode
2288 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2289
2290 let edit_operation = self.text.edit(edits.iter().cloned());
2291 let edit_id = edit_operation.timestamp();
2292
2293 if let Some((before_edit, mode)) = autoindent_request {
2294 let mut delta = 0isize;
2295 let mut previous_setting = None;
2296 let entries: Vec<_> = edits
2297 .into_iter()
2298 .enumerate()
2299 .zip(&edit_operation.as_edit().unwrap().new_text)
2300 .filter(|((_, (range, _)), _)| {
2301 let language = before_edit.language_at(range.start);
2302 let language_id = language.map(|l| l.id());
2303 if let Some((cached_language_id, auto_indent)) = previous_setting
2304 && cached_language_id == language_id
2305 {
2306 auto_indent
2307 } else {
2308 // The auto-indent setting is not present in editorconfigs, hence
2309 // we can avoid passing the file here.
2310 let auto_indent =
2311 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2312 previous_setting = Some((language_id, auto_indent));
2313 auto_indent
2314 }
2315 })
2316 .map(|((ix, (range, _)), new_text)| {
2317 let new_text_length = new_text.len();
2318 let old_start = range.start.to_point(&before_edit);
2319 let new_start = (delta + range.start as isize) as usize;
2320 let range_len = range.end - range.start;
2321 delta += new_text_length as isize - range_len as isize;
2322
2323 // Decide what range of the insertion to auto-indent, and whether
2324 // the first line of the insertion should be considered a newly-inserted line
2325 // or an edit to an existing line.
2326 let mut range_of_insertion_to_indent = 0..new_text_length;
2327 let mut first_line_is_new = true;
2328
2329 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2330 let old_line_end = before_edit.line_len(old_start.row);
2331
2332 if old_start.column > old_line_start {
2333 first_line_is_new = false;
2334 }
2335
2336 if !new_text.contains('\n')
2337 && (old_start.column + (range_len as u32) < old_line_end
2338 || old_line_end == old_line_start)
2339 {
2340 first_line_is_new = false;
2341 }
2342
2343 // When inserting text starting with a newline, avoid auto-indenting the
2344 // previous line.
2345 if new_text.starts_with('\n') {
2346 range_of_insertion_to_indent.start += 1;
2347 first_line_is_new = true;
2348 }
2349
2350 let mut original_indent_column = None;
2351 if let AutoindentMode::Block {
2352 original_indent_columns,
2353 } = &mode
2354 {
2355 original_indent_column = Some(if new_text.starts_with('\n') {
2356 indent_size_for_text(
2357 new_text[range_of_insertion_to_indent.clone()].chars(),
2358 )
2359 .len
2360 } else {
2361 original_indent_columns
2362 .get(ix)
2363 .copied()
2364 .flatten()
2365 .unwrap_or_else(|| {
2366 indent_size_for_text(
2367 new_text[range_of_insertion_to_indent.clone()].chars(),
2368 )
2369 .len
2370 })
2371 });
2372
2373 // Avoid auto-indenting the line after the edit.
2374 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2375 range_of_insertion_to_indent.end -= 1;
2376 }
2377 }
2378
2379 AutoindentRequestEntry {
2380 first_line_is_new,
2381 original_indent_column,
2382 indent_size: before_edit.language_indent_size_at(range.start, cx),
2383 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2384 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2385 }
2386 })
2387 .collect();
2388
2389 if !entries.is_empty() {
2390 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2391 before_edit,
2392 entries,
2393 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2394 ignore_empty_lines: false,
2395 }));
2396 }
2397 }
2398
2399 self.end_transaction(cx);
2400 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2401 Some(edit_id)
2402 }
2403
2404 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2405 self.was_changed();
2406
2407 if self.edits_since::<usize>(old_version).next().is_none() {
2408 return;
2409 }
2410
2411 self.reparse(cx);
2412 cx.emit(BufferEvent::Edited);
2413 if was_dirty != self.is_dirty() {
2414 cx.emit(BufferEvent::DirtyChanged);
2415 }
2416 cx.notify();
2417 }
2418
2419 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2420 where
2421 I: IntoIterator<Item = Range<T>>,
2422 T: ToOffset + Copy,
2423 {
2424 let before_edit = self.snapshot();
2425 let entries = ranges
2426 .into_iter()
2427 .map(|range| AutoindentRequestEntry {
2428 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2429 first_line_is_new: true,
2430 indent_size: before_edit.language_indent_size_at(range.start, cx),
2431 original_indent_column: None,
2432 })
2433 .collect();
2434 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2435 before_edit,
2436 entries,
2437 is_block_mode: false,
2438 ignore_empty_lines: true,
2439 }));
2440 self.request_autoindent(cx);
2441 }
2442
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
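    ///
    /// A minimal sketch: place the given position on its own blank line, padded with
    /// blank lines above and below when needed:
    ///
    /// ```ignore
    /// let new_line_start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
    /// ```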
2445 pub fn insert_empty_line(
2446 &mut self,
2447 position: impl ToPoint,
2448 space_above: bool,
2449 space_below: bool,
2450 cx: &mut Context<Self>,
2451 ) -> Point {
2452 let mut position = position.to_point(self);
2453
2454 self.start_transaction();
2455
2456 self.edit(
2457 [(position..position, "\n")],
2458 Some(AutoindentMode::EachLine),
2459 cx,
2460 );
2461
2462 if position.column > 0 {
2463 position += Point::new(1, 0);
2464 }
2465
2466 if !self.is_line_blank(position.row) {
2467 self.edit(
2468 [(position..position, "\n")],
2469 Some(AutoindentMode::EachLine),
2470 cx,
2471 );
2472 }
2473
2474 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2475 self.edit(
2476 [(position..position, "\n")],
2477 Some(AutoindentMode::EachLine),
2478 cx,
2479 );
2480 position.row += 1;
2481 }
2482
2483 if space_below
2484 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2485 {
2486 self.edit(
2487 [(position..position, "\n")],
2488 Some(AutoindentMode::EachLine),
2489 cx,
2490 );
2491 }
2492
2493 self.end_transaction(cx);
2494
2495 position
2496 }
2497
2498 /// Applies the given remote operations to the buffer.
2499 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2500 self.pending_autoindent.take();
2501 let was_dirty = self.is_dirty();
2502 let old_version = self.version.clone();
2503 let mut deferred_ops = Vec::new();
2504 let buffer_ops = ops
2505 .into_iter()
2506 .filter_map(|op| match op {
2507 Operation::Buffer(op) => Some(op),
2508 _ => {
2509 if self.can_apply_op(&op) {
2510 self.apply_op(op, cx);
2511 } else {
2512 deferred_ops.push(op);
2513 }
2514 None
2515 }
2516 })
2517 .collect::<Vec<_>>();
2518 for operation in buffer_ops.iter() {
2519 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2520 }
2521 self.text.apply_ops(buffer_ops);
2522 self.deferred_ops.insert(deferred_ops);
2523 self.flush_deferred_ops(cx);
2524 self.did_edit(&old_version, was_dirty, cx);
2525 // Notify independently of whether the buffer was edited as the operations could include a
2526 // selection update.
2527 cx.notify();
2528 }
2529
2530 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2531 let mut deferred_ops = Vec::new();
2532 for op in self.deferred_ops.drain().iter().cloned() {
2533 if self.can_apply_op(&op) {
2534 self.apply_op(op, cx);
2535 } else {
2536 deferred_ops.push(op);
2537 }
2538 }
2539 self.deferred_ops.insert(deferred_ops);
2540 }
2541
2542 pub fn has_deferred_ops(&self) -> bool {
2543 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2544 }
2545
2546 fn can_apply_op(&self, operation: &Operation) -> bool {
2547 match operation {
2548 Operation::Buffer(_) => {
2549 unreachable!("buffer operations should never be applied at this layer")
2550 }
2551 Operation::UpdateDiagnostics {
2552 diagnostics: diagnostic_set,
2553 ..
2554 } => diagnostic_set.iter().all(|diagnostic| {
2555 self.text.can_resolve(&diagnostic.range.start)
2556 && self.text.can_resolve(&diagnostic.range.end)
2557 }),
2558 Operation::UpdateSelections { selections, .. } => selections
2559 .iter()
2560 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2561 Operation::UpdateCompletionTriggers { .. } => true,
2562 }
2563 }
2564
2565 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2566 match operation {
2567 Operation::Buffer(_) => {
2568 unreachable!("buffer operations should never be applied at this layer")
2569 }
2570 Operation::UpdateDiagnostics {
2571 server_id,
2572 diagnostics: diagnostic_set,
2573 lamport_timestamp,
2574 } => {
2575 let snapshot = self.snapshot();
2576 self.apply_diagnostic_update(
2577 server_id,
2578 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2579 lamport_timestamp,
2580 cx,
2581 );
2582 }
2583 Operation::UpdateSelections {
2584 selections,
2585 lamport_timestamp,
2586 line_mode,
2587 cursor_shape,
2588 } => {
2589 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2590 && set.lamport_timestamp > lamport_timestamp
2591 {
2592 return;
2593 }
2594
2595 self.remote_selections.insert(
2596 lamport_timestamp.replica_id,
2597 SelectionSet {
2598 selections,
2599 lamport_timestamp,
2600 line_mode,
2601 cursor_shape,
2602 },
2603 );
2604 self.text.lamport_clock.observe(lamport_timestamp);
2605 self.non_text_state_update_count += 1;
2606 }
2607 Operation::UpdateCompletionTriggers {
2608 triggers,
2609 lamport_timestamp,
2610 server_id,
2611 } => {
2612 if triggers.is_empty() {
2613 self.completion_triggers_per_language_server
2614 .remove(&server_id);
2615 self.completion_triggers = self
2616 .completion_triggers_per_language_server
2617 .values()
2618 .flat_map(|triggers| triggers.iter().cloned())
2619 .collect();
2620 } else {
2621 self.completion_triggers_per_language_server
2622 .insert(server_id, triggers.iter().cloned().collect());
2623 self.completion_triggers.extend(triggers);
2624 }
2625 self.text.lamport_clock.observe(lamport_timestamp);
2626 }
2627 }
2628 }
2629
2630 fn apply_diagnostic_update(
2631 &mut self,
2632 server_id: LanguageServerId,
2633 diagnostics: DiagnosticSet,
2634 lamport_timestamp: clock::Lamport,
2635 cx: &mut Context<Self>,
2636 ) {
2637 if lamport_timestamp > self.diagnostics_timestamp {
2638 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2639 if diagnostics.is_empty() {
2640 if let Ok(ix) = ix {
2641 self.diagnostics.remove(ix);
2642 }
2643 } else {
2644 match ix {
2645 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2646 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2647 };
2648 }
2649 self.diagnostics_timestamp = lamport_timestamp;
2650 self.non_text_state_update_count += 1;
2651 self.text.lamport_clock.observe(lamport_timestamp);
2652 cx.notify();
2653 cx.emit(BufferEvent::DiagnosticsUpdated);
2654 }
2655 }
2656
2657 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2658 self.was_changed();
2659 cx.emit(BufferEvent::Operation {
2660 operation,
2661 is_local,
2662 });
2663 }
2664
2665 /// Removes the selections for a given peer.
2666 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2667 self.remote_selections.remove(&replica_id);
2668 cx.notify();
2669 }
2670
2671 /// Undoes the most recent transaction.
2672 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2673 let was_dirty = self.is_dirty();
2674 let old_version = self.version.clone();
2675
2676 if let Some((transaction_id, operation)) = self.text.undo() {
2677 self.send_operation(Operation::Buffer(operation), true, cx);
2678 self.did_edit(&old_version, was_dirty, cx);
2679 Some(transaction_id)
2680 } else {
2681 None
2682 }
2683 }
2684
2685 /// Manually undoes a specific transaction in the buffer's undo history.
2686 pub fn undo_transaction(
2687 &mut self,
2688 transaction_id: TransactionId,
2689 cx: &mut Context<Self>,
2690 ) -> bool {
2691 let was_dirty = self.is_dirty();
2692 let old_version = self.version.clone();
2693 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2694 self.send_operation(Operation::Buffer(operation), true, cx);
2695 self.did_edit(&old_version, was_dirty, cx);
2696 true
2697 } else {
2698 false
2699 }
2700 }
2701
2702 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2703 pub fn undo_to_transaction(
2704 &mut self,
2705 transaction_id: TransactionId,
2706 cx: &mut Context<Self>,
2707 ) -> bool {
2708 let was_dirty = self.is_dirty();
2709 let old_version = self.version.clone();
2710
2711 let operations = self.text.undo_to_transaction(transaction_id);
2712 let undone = !operations.is_empty();
2713 for operation in operations {
2714 self.send_operation(Operation::Buffer(operation), true, cx);
2715 }
2716 if undone {
2717 self.did_edit(&old_version, was_dirty, cx)
2718 }
2719 undone
2720 }
2721
2722 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2723 let was_dirty = self.is_dirty();
2724 let operation = self.text.undo_operations(counts);
2725 let old_version = self.version.clone();
2726 self.send_operation(Operation::Buffer(operation), true, cx);
2727 self.did_edit(&old_version, was_dirty, cx);
2728 }
2729
    /// Redoes the most recent transaction.
2731 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2732 let was_dirty = self.is_dirty();
2733 let old_version = self.version.clone();
2734
2735 if let Some((transaction_id, operation)) = self.text.redo() {
2736 self.send_operation(Operation::Buffer(operation), true, cx);
2737 self.did_edit(&old_version, was_dirty, cx);
2738 Some(transaction_id)
2739 } else {
2740 None
2741 }
2742 }
2743
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2745 pub fn redo_to_transaction(
2746 &mut self,
2747 transaction_id: TransactionId,
2748 cx: &mut Context<Self>,
2749 ) -> bool {
2750 let was_dirty = self.is_dirty();
2751 let old_version = self.version.clone();
2752
2753 let operations = self.text.redo_to_transaction(transaction_id);
2754 let redone = !operations.is_empty();
2755 for operation in operations {
2756 self.send_operation(Operation::Buffer(operation), true, cx);
2757 }
2758 if redone {
2759 self.did_edit(&old_version, was_dirty, cx)
2760 }
2761 redone
2762 }
2763
2764 /// Override current completion triggers with the user-provided completion triggers.
2765 pub fn set_completion_triggers(
2766 &mut self,
2767 server_id: LanguageServerId,
2768 triggers: BTreeSet<String>,
2769 cx: &mut Context<Self>,
2770 ) {
2771 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2772 if triggers.is_empty() {
2773 self.completion_triggers_per_language_server
2774 .remove(&server_id);
2775 self.completion_triggers = self
2776 .completion_triggers_per_language_server
2777 .values()
2778 .flat_map(|triggers| triggers.iter().cloned())
2779 .collect();
2780 } else {
2781 self.completion_triggers_per_language_server
2782 .insert(server_id, triggers.clone());
2783 self.completion_triggers.extend(triggers.iter().cloned());
2784 }
2785 self.send_operation(
2786 Operation::UpdateCompletionTriggers {
2787 triggers: triggers.into_iter().collect(),
2788 lamport_timestamp: self.completion_triggers_timestamp,
2789 server_id,
2790 },
2791 true,
2792 cx,
2793 );
2794 cx.notify();
2795 }
2796
2797 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2799 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2800 &self.completion_triggers
2801 }
2802
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `preserve_preview`
    /// to return true until there are additional edits.
2806 pub fn refresh_preview(&mut self) {
2807 self.preview_version = self.version.clone();
2808 }
2809
2810 /// Whether we should preserve the preview status of a tab containing this buffer.
2811 pub fn preserve_preview(&self) -> bool {
2812 !self.has_edits_since(&self.preview_version)
2813 }
2814}
2815
2816#[doc(hidden)]
2817#[cfg(any(test, feature = "test-support"))]
2818impl Buffer {
2819 pub fn edit_via_marked_text(
2820 &mut self,
2821 marked_string: &str,
2822 autoindent_mode: Option<AutoindentMode>,
2823 cx: &mut Context<Self>,
2824 ) {
2825 let edits = self.edits_for_marked_text(marked_string);
2826 self.edit(edits, autoindent_mode, cx);
2827 }
2828
2829 pub fn set_group_interval(&mut self, group_interval: Duration) {
2830 self.text.set_group_interval(group_interval);
2831 }
2832
2833 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2834 where
2835 T: rand::Rng,
2836 {
2837 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2838 let mut last_end = None;
2839 for _ in 0..old_range_count {
2840 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2841 break;
2842 }
2843
2844 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2845 let mut range = self.random_byte_range(new_start, rng);
2846 if rng.gen_bool(0.2) {
2847 mem::swap(&mut range.start, &mut range.end);
2848 }
2849 last_end = Some(range.end);
2850
2851 let new_text_len = rng.gen_range(0..10);
2852 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2853 new_text = new_text.to_uppercase();
2854
2855 edits.push((range, new_text));
2856 }
2857 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2858 self.edit(edits, None, cx);
2859 }
2860
2861 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2862 let was_dirty = self.is_dirty();
2863 let old_version = self.version.clone();
2864
2865 let ops = self.text.randomly_undo_redo(rng);
2866 if !ops.is_empty() {
2867 for op in ops {
2868 self.send_operation(Operation::Buffer(op), true, cx);
2869 self.did_edit(&old_version, was_dirty, cx);
2870 }
2871 }
2872 }
2873}
2874
2875impl EventEmitter<BufferEvent> for Buffer {}
2876
2877impl Deref for Buffer {
2878 type Target = TextBuffer;
2879
2880 fn deref(&self) -> &Self::Target {
2881 &self.text
2882 }
2883}
2884
2885impl BufferSnapshot {
2886 /// Returns [`IndentSize`] for a given line that respects user settings and
2887 /// language preferences.
2888 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2889 indent_size_for_line(self, row)
2890 }
2891
2892 /// Returns [`IndentSize`] for a given position that respects user settings
2893 /// and language preferences.
2894 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2895 let settings = language_settings(
2896 self.language_at(position).map(|l| l.name()),
2897 self.file(),
2898 cx,
2899 );
2900 if settings.hard_tabs {
2901 IndentSize::tab()
2902 } else {
2903 IndentSize::spaces(settings.tab_size.get())
2904 }
2905 }
2906
2907 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2908 /// is passed in as `single_indent_size`.
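    ///
    /// A minimal sketch (not compiled as a doc-test), asking for suggestions on rows
    /// 2 through 4 with a four-space indent unit:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```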
2909 pub fn suggested_indents(
2910 &self,
2911 rows: impl Iterator<Item = u32>,
2912 single_indent_size: IndentSize,
2913 ) -> BTreeMap<u32, IndentSize> {
2914 let mut result = BTreeMap::new();
2915
2916 for row_range in contiguous_ranges(rows, 10) {
2917 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2918 Some(suggestions) => suggestions,
2919 _ => break,
2920 };
2921
2922 for (row, suggestion) in row_range.zip(suggestions) {
2923 let indent_size = if let Some(suggestion) = suggestion {
2924 result
2925 .get(&suggestion.basis_row)
2926 .copied()
2927 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2928 .with_delta(suggestion.delta, single_indent_size)
2929 } else {
2930 self.indent_size_for_line(row)
2931 };
2932
2933 result.insert(row, indent_size);
2934 }
2935 }
2936
2937 result
2938 }
2939
2940 fn suggest_autoindents(
2941 &self,
2942 row_range: Range<u32>,
2943 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2944 let config = &self.language.as_ref()?.config;
2945 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2946
2947 #[derive(Debug, Clone)]
2948 struct StartPosition {
2949 start: Point,
2950 suffix: SharedString,
2951 }
2952
2953 // Find the suggested indentation ranges based on the syntax tree.
2954 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2955 let end = Point::new(row_range.end, 0);
2956 let range = (start..end).to_offset(&self.text);
2957 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2958 Some(&grammar.indents_config.as_ref()?.query)
2959 });
2960 let indent_configs = matches
2961 .grammars()
2962 .iter()
2963 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2964 .collect::<Vec<_>>();
2965
2966 let mut indent_ranges = Vec::<Range<Point>>::new();
2967 let mut start_positions = Vec::<StartPosition>::new();
2968 let mut outdent_positions = Vec::<Point>::new();
2969 while let Some(mat) = matches.peek() {
2970 let mut start: Option<Point> = None;
2971 let mut end: Option<Point> = None;
2972
2973 let config = indent_configs[mat.grammar_index];
2974 for capture in mat.captures {
2975 if capture.index == config.indent_capture_ix {
2976 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2977 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2978 } else if Some(capture.index) == config.start_capture_ix {
2979 start = Some(Point::from_ts_point(capture.node.end_position()));
2980 } else if Some(capture.index) == config.end_capture_ix {
2981 end = Some(Point::from_ts_point(capture.node.start_position()));
2982 } else if Some(capture.index) == config.outdent_capture_ix {
2983 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2984 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2985 start_positions.push(StartPosition {
2986 start: Point::from_ts_point(capture.node.start_position()),
2987 suffix: suffix.clone(),
2988 });
2989 }
2990 }
2991
2992 matches.advance();
2993 if let Some((start, end)) = start.zip(end) {
2994 if start.row == end.row {
2995 continue;
2996 }
2997 let range = start..end;
2998 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2999 Err(ix) => indent_ranges.insert(ix, range),
3000 Ok(ix) => {
3001 let prev_range = &mut indent_ranges[ix];
3002 prev_range.end = prev_range.end.max(range.end);
3003 }
3004 }
3005 }
3006 }
3007
3008 let mut error_ranges = Vec::<Range<Point>>::new();
3009 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3010 grammar.error_query.as_ref()
3011 });
3012 while let Some(mat) = matches.peek() {
3013 let node = mat.captures[0].node;
3014 let start = Point::from_ts_point(node.start_position());
3015 let end = Point::from_ts_point(node.end_position());
3016 let range = start..end;
3017 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3018 Ok(ix) | Err(ix) => ix,
3019 };
3020 let mut end_ix = ix;
3021 while let Some(existing_range) = error_ranges.get(end_ix) {
3022 if existing_range.end < end {
3023 end_ix += 1;
3024 } else {
3025 break;
3026 }
3027 }
3028 error_ranges.splice(ix..end_ix, [range]);
3029 matches.advance();
3030 }
3031
3032 outdent_positions.sort();
3033 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3036 if let Some(range_to_truncate) = indent_ranges
3037 .iter_mut()
3038 .filter(|indent_range| indent_range.contains(&outdent_position))
3039 .next_back()
3040 {
3041 range_to_truncate.end = outdent_position;
3042 }
3043 }
3044
3045 start_positions.sort_by_key(|b| b.start);
3046
        // Find the suggested indentation increases and decreases based on regexes.
3048 let mut regex_outdent_map = HashMap::default();
3049 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3050 let mut start_positions_iter = start_positions.iter().peekable();
3051
3052 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3053 self.for_each_line(
3054 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3055 ..Point::new(row_range.end, 0),
3056 |row, line| {
3057 if config
3058 .decrease_indent_pattern
3059 .as_ref()
3060 .is_some_and(|regex| regex.is_match(line))
3061 {
3062 indent_change_rows.push((row, Ordering::Less));
3063 }
3064 if config
3065 .increase_indent_pattern
3066 .as_ref()
3067 .is_some_and(|regex| regex.is_match(line))
3068 {
3069 indent_change_rows.push((row + 1, Ordering::Greater));
3070 }
3071 while let Some(pos) = start_positions_iter.peek() {
3072 if pos.start.row < row {
3073 let pos = start_positions_iter.next().unwrap();
3074 last_seen_suffix
3075 .entry(pos.suffix.to_string())
3076 .or_default()
3077 .push(pos.start);
3078 } else {
3079 break;
3080 }
3081 }
3082 for rule in &config.decrease_indent_patterns {
3083 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3084 let row_start_column = self.indent_size_for_line(row).len;
3085 let basis_row = rule
3086 .valid_after
3087 .iter()
3088 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3089 .flatten()
3090 .filter(|start_point| start_point.column <= row_start_column)
3091 .max_by_key(|start_point| start_point.row);
3092 if let Some(outdent_to_row) = basis_row {
3093 regex_outdent_map.insert(row, outdent_to_row.row);
3094 }
3095 break;
3096 }
3097 }
3098 },
3099 );
3100
3101 let mut indent_changes = indent_change_rows.into_iter().peekable();
3102 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3103 prev_non_blank_row.unwrap_or(0)
3104 } else {
3105 row_range.start.saturating_sub(1)
3106 };
3107
3108 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3109 Some(row_range.map(move |row| {
3110 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3111
3112 let mut indent_from_prev_row = false;
3113 let mut outdent_from_prev_row = false;
3114 let mut outdent_to_row = u32::MAX;
3115 let mut from_regex = false;
3116
3117 while let Some((indent_row, delta)) = indent_changes.peek() {
3118 match indent_row.cmp(&row) {
3119 Ordering::Equal => match delta {
3120 Ordering::Less => {
3121 from_regex = true;
3122 outdent_from_prev_row = true
3123 }
3124 Ordering::Greater => {
3125 indent_from_prev_row = true;
3126 from_regex = true
3127 }
3128 _ => {}
3129 },
3130
3131 Ordering::Greater => break,
3132 Ordering::Less => {}
3133 }
3134
3135 indent_changes.next();
3136 }
3137
3138 for range in &indent_ranges {
3139 if range.start.row >= row {
3140 break;
3141 }
3142 if range.start.row == prev_row && range.end > row_start {
3143 indent_from_prev_row = true;
3144 }
3145 if range.end > prev_row_start && range.end <= row_start {
3146 outdent_to_row = outdent_to_row.min(range.start.row);
3147 }
3148 }
3149
3150 if let Some(basis_row) = regex_outdent_map.get(&row) {
3151 indent_from_prev_row = false;
3152 outdent_to_row = *basis_row;
3153 from_regex = true;
3154 }
3155
3156 let within_error = error_ranges
3157 .iter()
3158 .any(|e| e.start.row < row && e.end > row_start);
3159
3160 let suggestion = if outdent_to_row == prev_row
3161 || (outdent_from_prev_row && indent_from_prev_row)
3162 {
3163 Some(IndentSuggestion {
3164 basis_row: prev_row,
3165 delta: Ordering::Equal,
3166 within_error: within_error && !from_regex,
3167 })
3168 } else if indent_from_prev_row {
3169 Some(IndentSuggestion {
3170 basis_row: prev_row,
3171 delta: Ordering::Greater,
3172 within_error: within_error && !from_regex,
3173 })
3174 } else if outdent_to_row < prev_row {
3175 Some(IndentSuggestion {
3176 basis_row: outdent_to_row,
3177 delta: Ordering::Equal,
3178 within_error: within_error && !from_regex,
3179 })
3180 } else if outdent_from_prev_row {
3181 Some(IndentSuggestion {
3182 basis_row: prev_row,
3183 delta: Ordering::Less,
3184 within_error: within_error && !from_regex,
3185 })
3186 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3187 {
3188 Some(IndentSuggestion {
3189 basis_row: prev_row,
3190 delta: Ordering::Equal,
3191 within_error: within_error && !from_regex,
3192 })
3193 } else {
3194 None
3195 };
3196
3197 prev_row = row;
3198 prev_row_start = row_start;
3199 suggestion
3200 }))
3201 }
3202
3203 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3204 while row > 0 {
3205 row -= 1;
3206 if !self.is_line_blank(row) {
3207 return Some(row);
3208 }
3209 }
3210 None
3211 }
3212
3213 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3214 let captures = self.syntax.captures(range, &self.text, |grammar| {
3215 grammar.highlights_query.as_ref()
3216 });
3217 let highlight_maps = captures
3218 .grammars()
3219 .iter()
3220 .map(|grammar| grammar.highlight_map())
3221 .collect();
3222 (captures, highlight_maps)
3223 }
3224
3225 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3226 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3227 /// returned in chunks where each chunk has a single syntax highlighting style and
3228 /// diagnostic status.
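    ///
    /// A hedged sketch (assuming the `text` field on each yielded chunk, as used
    /// elsewhere in this crate):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight and diagnostic status alongside its text.
    ///     text.push_str(chunk.text);
    /// }
    /// ```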
3229 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3230 let range = range.start.to_offset(self)..range.end.to_offset(self);
3231
3232 let mut syntax = None;
3233 if language_aware {
3234 syntax = Some(self.get_highlights(range.clone()));
3235 }
3236 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3237 let diagnostics = language_aware;
3238 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3239 }
3240
3241 pub fn highlighted_text_for_range<T: ToOffset>(
3242 &self,
3243 range: Range<T>,
3244 override_style: Option<HighlightStyle>,
3245 syntax_theme: &SyntaxTheme,
3246 ) -> HighlightedText {
3247 HighlightedText::from_buffer_range(
3248 range,
3249 &self.text,
3250 &self.syntax,
3251 override_style,
3252 syntax_theme,
3253 )
3254 }
3255
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3258 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3259 let mut line = String::new();
3260 let mut row = range.start.row;
3261 for chunk in self
3262 .as_rope()
3263 .chunks_in_range(range.to_offset(self))
3264 .chain(["\n"])
3265 {
3266 for (newline_ix, text) in chunk.split('\n').enumerate() {
3267 if newline_ix > 0 {
3268 callback(row, &line);
3269 row += 1;
3270 line.clear();
3271 }
3272 line.push_str(text);
3273 }
3274 }
3275 }
3276
3277 /// Iterates over every [`SyntaxLayer`] in the buffer.
3278 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3279 self.syntax
3280 .layers_for_range(0..self.len(), &self.text, true)
3281 }
3282
3283 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3284 let offset = position.to_offset(self);
3285 self.syntax
3286 .layers_for_range(offset..offset, &self.text, false)
3287 .filter(|l| l.node().end_byte() > offset)
3288 .last()
3289 }
3290
3291 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3292 &self,
3293 range: Range<D>,
3294 ) -> Option<SyntaxLayer<'_>> {
3295 let range = range.to_offset(self);
3296 self.syntax
3297 .layers_for_range(range, &self.text, false)
3298 .max_by(|a, b| {
3299 if a.depth != b.depth {
3300 a.depth.cmp(&b.depth)
3301 } else if a.offset.0 != b.offset.0 {
3302 a.offset.0.cmp(&b.offset.0)
3303 } else {
3304 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3305 }
3306 })
3307 }
3308
3309 /// Returns the main [`Language`].
3310 pub fn language(&self) -> Option<&Arc<Language>> {
3311 self.language.as_ref()
3312 }
3313
3314 /// Returns the [`Language`] at the given location.
3315 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3316 self.syntax_layer_at(position)
3317 .map(|info| info.language)
3318 .or(self.language.as_ref())
3319 }
3320
3321 /// Returns the settings for the language at the given location.
3322 pub fn settings_at<'a, D: ToOffset>(
3323 &'a self,
3324 position: D,
3325 cx: &'a App,
3326 ) -> Cow<'a, LanguageSettings> {
3327 language_settings(
3328 self.language_at(position).map(|l| l.name()),
3329 self.file.as_ref(),
3330 cx,
3331 )
3332 }
3333
3334 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3335 CharClassifier::new(self.language_scope_at(point))
3336 }
3337
3338 /// Returns the [`LanguageScope`] at the given location.
3339 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3340 let offset = position.to_offset(self);
3341 let mut scope = None;
3342 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3343
3344 // Use the layer that has the smallest node intersecting the given point.
3345 for layer in self
3346 .syntax
3347 .layers_for_range(offset..offset, &self.text, false)
3348 {
3349 let mut cursor = layer.node().walk();
3350
3351 let mut range = None;
3352 loop {
3353 let child_range = cursor.node().byte_range();
3354 if !child_range.contains(&offset) {
3355 break;
3356 }
3357
3358 range = Some(child_range);
3359 if cursor.goto_first_child_for_byte(offset).is_none() {
3360 break;
3361 }
3362 }
3363
3364 if let Some(range) = range
3365 && smallest_range_and_depth.as_ref().is_none_or(
3366 |(smallest_range, smallest_range_depth)| {
3367 if layer.depth > *smallest_range_depth {
3368 true
3369 } else if layer.depth == *smallest_range_depth {
3370 range.len() < smallest_range.len()
3371 } else {
3372 false
3373 }
3374 },
3375 )
3376 {
3377 smallest_range_and_depth = Some((range, layer.depth));
3378 scope = Some(LanguageScope {
3379 language: layer.language.clone(),
3380 override_id: layer.override_id(offset, &self.text),
3381 });
3382 }
3383 }
3384
3385 scope.or_else(|| {
3386 self.language.clone().map(|language| LanguageScope {
3387 language,
3388 override_id: None,
3389 })
3390 })
3391 }
3392
3393 /// Returns a tuple of the range and character kind of the word
3394 /// surrounding the given position.
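    ///
    /// A minimal sketch (not compiled as a doc-test), assuming `offset` is a cursor
    /// position in the buffer and using `text_for_range` from the underlying text buffer:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, false);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```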
3395 pub fn surrounding_word<T: ToOffset>(
3396 &self,
3397 start: T,
3398 for_completion: bool,
3399 ) -> (Range<usize>, Option<CharKind>) {
3400 let mut start = start.to_offset(self);
3401 let mut end = start;
3402 let mut next_chars = self.chars_at(start).take(128).peekable();
3403 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3404
3405 let classifier = self
3406 .char_classifier_at(start)
3407 .for_completion(for_completion);
3408 let word_kind = cmp::max(
3409 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3410 next_chars.peek().copied().map(|c| classifier.kind(c)),
3411 );
3412
3413 for ch in prev_chars {
3414 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3415 start -= ch.len_utf8();
3416 } else {
3417 break;
3418 }
3419 }
3420
3421 for ch in next_chars {
3422 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3423 end += ch.len_utf8();
3424 } else {
3425 break;
3426 }
3427 }
3428
3429 (start..end, word_kind)
3430 }
3431
3432 /// Returns the closest syntax node enclosing the given range.
3433 pub fn syntax_ancestor<'a, T: ToOffset>(
3434 &'a self,
3435 range: Range<T>,
3436 ) -> Option<tree_sitter::Node<'a>> {
3437 let range = range.start.to_offset(self)..range.end.to_offset(self);
3438 let mut result: Option<tree_sitter::Node<'a>> = None;
3439 'outer: for layer in self
3440 .syntax
3441 .layers_for_range(range.clone(), &self.text, true)
3442 {
3443 let mut cursor = layer.node().walk();
3444
3445 // Descend to the first leaf that touches the start of the range.
3446 //
3447 // If the range is non-empty and the current node ends exactly at the start,
3448 // move to the next sibling to find a node that extends beyond the start.
3449 //
3450 // If the range is empty and the current node starts after the range position,
3451 // move to the previous sibling to find the node that contains the position.
3452 while cursor.goto_first_child_for_byte(range.start).is_some() {
3453 if !range.is_empty() && cursor.node().end_byte() == range.start {
3454 cursor.goto_next_sibling();
3455 }
3456 if range.is_empty() && cursor.node().start_byte() > range.start {
3457 cursor.goto_previous_sibling();
3458 }
3459 }
3460
3461 // Ascend to the smallest ancestor that strictly contains the range.
3462 loop {
3463 let node_range = cursor.node().byte_range();
3464 if node_range.start <= range.start
3465 && node_range.end >= range.end
3466 && node_range.len() > range.len()
3467 {
3468 break;
3469 }
3470 if !cursor.goto_parent() {
3471 continue 'outer;
3472 }
3473 }
3474
3475 let left_node = cursor.node();
3476 let mut layer_result = left_node;
3477
3478 // For an empty range, try to find another node immediately to the right of the range.
3479 if left_node.end_byte() == range.start {
3480 let mut right_node = None;
3481 while !cursor.goto_next_sibling() {
3482 if !cursor.goto_parent() {
3483 break;
3484 }
3485 }
3486
3487 while cursor.node().start_byte() == range.start {
3488 right_node = Some(cursor.node());
3489 if !cursor.goto_first_child() {
3490 break;
3491 }
3492 }
3493
3494 // If there is a candidate node on both sides of the (empty) range, then
3495 // decide between the two by favoring a named node over an anonymous token.
3496 // If both nodes are the same in that regard, favor the right one.
3497 if let Some(right_node) = right_node
3498 && (right_node.is_named() || !left_node.is_named())
3499 {
3500 layer_result = right_node;
3501 }
3502 }
3503
3504 if let Some(previous_result) = &result
3505 && previous_result.byte_range().len() < layer_result.byte_range().len()
3506 {
3507 continue;
3508 }
3509 result = Some(layer_result);
3510 }
3511
3512 result
3513 }
3514
3515 /// Returns the root syntax node within the given row
3516 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3517 let start_offset = position.to_offset(self);
3518
3519 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3520
3521 let layer = self
3522 .syntax
3523 .layers_for_range(start_offset..start_offset, &self.text, true)
3524 .next()?;
3525
3526 let mut cursor = layer.node().walk();
3527
3528 // Descend to the first leaf that touches the start of the range.
3529 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3530 if cursor.node().end_byte() == start_offset {
3531 cursor.goto_next_sibling();
3532 }
3533 }
3534
3535 // Ascend to the root node within the same row.
3536 while cursor.goto_parent() {
3537 if cursor.node().start_position().row != row {
3538 break;
3539 }
3540 }
3541
3542 Some(cursor.node())
3543 }
3544
3545 /// Returns the outline for the buffer.
3546 ///
3547 /// This method allows passing an optional [`SyntaxTheme`] to
3548 /// syntax-highlight the returned symbols.
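///
/// # Example
///
/// A sketch (not a doctest); `snapshot` is assumed to be a parsed `BufferSnapshot`:
///
/// ```ignore
/// if let Some(outline) = snapshot.outline(None) {
///     for item in &outline.items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```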
3549 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3550 self.outline_items_containing(0..self.len(), true, theme)
3551 .map(Outline::new)
3552 }
3553
3554 /// Returns all the symbols that contain the given position.
3555 ///
3556 /// This method allows passing an optional [`SyntaxTheme`] to
3557 /// syntax-highlight the returned symbols.
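///
/// # Example
///
/// A sketch (not a doctest); `cursor_offset` is an illustrative buffer offset:
///
/// ```ignore
/// if let Some(symbols) = snapshot.symbols_containing(cursor_offset, None) {
///     // E.g. the chain of containing items, outermost first.
///     let names: Vec<&str> = symbols.iter().map(|item| item.text.as_str()).collect();
/// }
/// ```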
3558 pub fn symbols_containing<T: ToOffset>(
3559 &self,
3560 position: T,
3561 theme: Option<&SyntaxTheme>,
3562 ) -> Option<Vec<OutlineItem<Anchor>>> {
3563 let position = position.to_offset(self);
3564 let mut items = self.outline_items_containing(
3565 position.saturating_sub(1)..self.len().min(position + 1),
3566 false,
3567 theme,
3568 )?;
3569 let mut prev_depth = None;
3570 items.retain(|item| {
3571 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3572 prev_depth = Some(item.depth);
3573 result
3574 });
3575 Some(items)
3576 }
3577
3578 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3579 let range = range.to_offset(self);
3580 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3581 grammar.outline_config.as_ref().map(|c| &c.query)
3582 });
3583 let configs = matches
3584 .grammars()
3585 .iter()
3586 .map(|g| g.outline_config.as_ref().unwrap())
3587 .collect::<Vec<_>>();
3588
3589 while let Some(mat) = matches.peek() {
3590 let config = &configs[mat.grammar_index];
3591 let containing_item_node = maybe!({
3592 let item_node = mat.captures.iter().find_map(|cap| {
3593 if cap.index == config.item_capture_ix {
3594 Some(cap.node)
3595 } else {
3596 None
3597 }
3598 })?;
3599
3600 let item_byte_range = item_node.byte_range();
3601 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3602 None
3603 } else {
3604 Some(item_node)
3605 }
3606 });
3607
3608 if let Some(item_node) = containing_item_node {
3609 return Some(
3610 Point::from_ts_point(item_node.start_position())
3611 ..Point::from_ts_point(item_node.end_position()),
3612 );
3613 }
3614
3615 matches.advance();
3616 }
3617 None
3618 }
3619
3620 pub fn outline_items_containing<T: ToOffset>(
3621 &self,
3622 range: Range<T>,
3623 include_extra_context: bool,
3624 theme: Option<&SyntaxTheme>,
3625 ) -> Option<Vec<OutlineItem<Anchor>>> {
3626 let range = range.to_offset(self);
3627 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3628 grammar.outline_config.as_ref().map(|c| &c.query)
3629 });
3630 let configs = matches
3631 .grammars()
3632 .iter()
3633 .map(|g| g.outline_config.as_ref().unwrap())
3634 .collect::<Vec<_>>();
3635
3636 let mut items = Vec::new();
3637 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3638 while let Some(mat) = matches.peek() {
3639 let config = &configs[mat.grammar_index];
3640 if let Some(item) =
3641 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3642 {
3643 items.push(item);
3644 } else if let Some(capture) = mat
3645 .captures
3646 .iter()
3647 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3648 {
3649 let capture_range = capture.node.start_position()..capture.node.end_position();
3650 let mut capture_row_range =
3651 capture_range.start.row as u32..capture_range.end.row as u32;
3652 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3653 {
3654 capture_row_range.end -= 1;
3655 }
3656 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3657 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3658 last_row_range.end = capture_row_range.end;
3659 } else {
3660 annotation_row_ranges.push(capture_row_range);
3661 }
3662 } else {
3663 annotation_row_ranges.push(capture_row_range);
3664 }
3665 }
3666 matches.advance();
3667 }
3668
3669 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3670
3671 // Assign depths based on containment relationships and convert to anchors.
3672 let mut item_ends_stack = Vec::<Point>::new();
3673 let mut anchor_items = Vec::new();
3674 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3675 for item in items {
3676 while let Some(last_end) = item_ends_stack.last().copied() {
3677 if last_end < item.range.end {
3678 item_ends_stack.pop();
3679 } else {
3680 break;
3681 }
3682 }
3683
3684 let mut annotation_row_range = None;
3685 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3686 let row_preceding_item = item.range.start.row.saturating_sub(1);
3687 if next_annotation_row_range.end < row_preceding_item {
3688 annotation_row_ranges.next();
3689 } else {
3690 if next_annotation_row_range.end == row_preceding_item {
3691 annotation_row_range = Some(next_annotation_row_range.clone());
3692 annotation_row_ranges.next();
3693 }
3694 break;
3695 }
3696 }
3697
3698 anchor_items.push(OutlineItem {
3699 depth: item_ends_stack.len(),
3700 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3701 text: item.text,
3702 highlight_ranges: item.highlight_ranges,
3703 name_ranges: item.name_ranges,
3704 body_range: item.body_range.map(|body_range| {
3705 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3706 }),
3707 annotation_range: annotation_row_range.map(|annotation_range| {
3708 self.anchor_after(Point::new(annotation_range.start, 0))
3709 ..self.anchor_before(Point::new(
3710 annotation_range.end,
3711 self.line_len(annotation_range.end),
3712 ))
3713 }),
3714 });
3715 item_ends_stack.push(item.range.end);
3716 }
3717
3718 Some(anchor_items)
3719 }
3720
3721 fn next_outline_item(
3722 &self,
3723 config: &OutlineConfig,
3724 mat: &SyntaxMapMatch,
3725 range: &Range<usize>,
3726 include_extra_context: bool,
3727 theme: Option<&SyntaxTheme>,
3728 ) -> Option<OutlineItem<Point>> {
3729 let item_node = mat.captures.iter().find_map(|cap| {
3730 if cap.index == config.item_capture_ix {
3731 Some(cap.node)
3732 } else {
3733 None
3734 }
3735 })?;
3736
3737 let item_byte_range = item_node.byte_range();
3738 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3739 return None;
3740 }
3741 let item_point_range = Point::from_ts_point(item_node.start_position())
3742 ..Point::from_ts_point(item_node.end_position());
3743
3744 let mut open_point = None;
3745 let mut close_point = None;
3746 let mut buffer_ranges = Vec::new();
3747 for capture in mat.captures {
3748 let node_is_name;
3749 if capture.index == config.name_capture_ix {
3750 node_is_name = true;
3751 } else if Some(capture.index) == config.context_capture_ix
3752 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3753 {
3754 node_is_name = false;
3755 } else {
3756 if Some(capture.index) == config.open_capture_ix {
3757 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3758 } else if Some(capture.index) == config.close_capture_ix {
3759 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3760 }
3761
3762 continue;
3763 }
3764
3765 let mut range = capture.node.start_byte()..capture.node.end_byte();
3766 let start = capture.node.start_position();
3767 if capture.node.end_position().row > start.row {
3768 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3769 }
3770
3771 if !range.is_empty() {
3772 buffer_ranges.push((range, node_is_name));
3773 }
3774 }
3775 if buffer_ranges.is_empty() {
3776 return None;
3777 }
3778 let mut text = String::new();
3779 let mut highlight_ranges = Vec::new();
3780 let mut name_ranges = Vec::new();
3781 let mut chunks = self.chunks(
3782 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3783 true,
3784 );
3785 let mut last_buffer_range_end = 0;
3786
3787 for (buffer_range, is_name) in buffer_ranges {
3788 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3789 if space_added {
3790 text.push(' ');
3791 }
3792 let before_append_len = text.len();
3793 let mut offset = buffer_range.start;
3794 chunks.seek(buffer_range.clone());
3795 for mut chunk in chunks.by_ref() {
3796 if chunk.text.len() > buffer_range.end - offset {
3797 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3798 offset = buffer_range.end;
3799 } else {
3800 offset += chunk.text.len();
3801 }
3802 let style = chunk
3803 .syntax_highlight_id
3804 .zip(theme)
3805 .and_then(|(highlight, theme)| highlight.style(theme));
3806 if let Some(style) = style {
3807 let start = text.len();
3808 let end = start + chunk.text.len();
3809 highlight_ranges.push((start..end, style));
3810 }
3811 text.push_str(chunk.text);
3812 if offset >= buffer_range.end {
3813 break;
3814 }
3815 }
3816 if is_name {
3817 let after_append_len = text.len();
3818 let start = if space_added && !name_ranges.is_empty() {
3819 before_append_len - 1
3820 } else {
3821 before_append_len
3822 };
3823 name_ranges.push(start..after_append_len);
3824 }
3825 last_buffer_range_end = buffer_range.end;
3826 }
3827
3828 Some(OutlineItem {
3829 depth: 0, // We'll calculate the depth later
3830 range: item_point_range,
3831 text,
3832 highlight_ranges,
3833 name_ranges,
3834 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3835 annotation_range: None,
3836 })
3837 }
3838
3839 pub fn function_body_fold_ranges<T: ToOffset>(
3840 &self,
3841 within: Range<T>,
3842 ) -> impl Iterator<Item = Range<usize>> + '_ {
3843 self.text_object_ranges(within, TreeSitterOptions::default())
3844 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3845 }
3846
/// For each grammar in the buffer's syntax layers, runs the [`tree_sitter::Query`]
/// selected by the given callback against the given range.
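///
/// # Example
///
/// A sketch of running each grammar's outline query over the whole buffer,
/// mirroring how other methods on this type consume matches:
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here.
///     matches.advance();
/// }
/// ```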
3849 pub fn matches(
3850 &self,
3851 range: Range<usize>,
3852 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3853 ) -> SyntaxMapMatches<'_> {
3854 self.syntax.matches(range, self, query)
3855 }
3856
3857 pub fn all_bracket_ranges(
3858 &self,
3859 range: Range<usize>,
3860 ) -> impl Iterator<Item = BracketMatch> + '_ {
3861 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3862 grammar.brackets_config.as_ref().map(|c| &c.query)
3863 });
3864 let configs = matches
3865 .grammars()
3866 .iter()
3867 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3868 .collect::<Vec<_>>();
3869
3870 iter::from_fn(move || {
3871 while let Some(mat) = matches.peek() {
3872 let mut open = None;
3873 let mut close = None;
3874 let config = &configs[mat.grammar_index];
3875 let pattern = &config.patterns[mat.pattern_index];
3876 for capture in mat.captures {
3877 if capture.index == config.open_capture_ix {
3878 open = Some(capture.node.byte_range());
3879 } else if capture.index == config.close_capture_ix {
3880 close = Some(capture.node.byte_range());
3881 }
3882 }
3883
3884 matches.advance();
3885
3886 let Some((open_range, close_range)) = open.zip(close) else {
3887 continue;
3888 };
3889
3890 let bracket_range = open_range.start..=close_range.end;
3891 if !bracket_range.overlaps(&range) {
3892 continue;
3893 }
3894
3895 return Some(BracketMatch {
3896 open_range,
3897 close_range,
3898 newline_only: pattern.newline_only,
3899 });
3900 }
3901 None
3902 })
3903 }
3904
3905 /// Returns bracket range pairs overlapping or adjacent to `range`
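///
/// # Example
///
/// A sketch (not a doctest), assuming the buffer's language defines a brackets query:
///
/// ```ignore
/// for pair in snapshot.bracket_ranges(0..snapshot.len()) {
///     let open: String = snapshot.text_for_range(pair.open_range.clone()).collect();
///     let close: String = snapshot.text_for_range(pair.close_range.clone()).collect();
///     println!("{open} ... {close}");
/// }
/// ```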
3906 pub fn bracket_ranges<T: ToOffset>(
3907 &self,
3908 range: Range<T>,
3909 ) -> impl Iterator<Item = BracketMatch> + '_ {
3910 // Find bracket pairs that *inclusively* contain the given range.
3911 let range = range.start.to_offset(self).saturating_sub(1)
3912 ..self.len().min(range.end.to_offset(self) + 1);
3913 self.all_bracket_ranges(range)
3914 .filter(|pair| !pair.newline_only)
3915 }
3916
3917 pub fn debug_variables_query<T: ToOffset>(
3918 &self,
3919 range: Range<T>,
3920 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3921 let range = range.start.to_offset(self).saturating_sub(1)
3922 ..self.len().min(range.end.to_offset(self) + 1);
3923
3924 let mut matches = self.syntax.matches_with_options(
3925 range.clone(),
3926 &self.text,
3927 TreeSitterOptions::default(),
3928 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3929 );
3930
3931 let configs = matches
3932 .grammars()
3933 .iter()
3934 .map(|grammar| grammar.debug_variables_config.as_ref())
3935 .collect::<Vec<_>>();
3936
3937 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3938
3939 iter::from_fn(move || {
3940 loop {
3941 while let Some(capture) = captures.pop() {
3942 if capture.0.overlaps(&range) {
3943 return Some(capture);
3944 }
3945 }
3946
3947 let mat = matches.peek()?;
3948
3949 let Some(config) = configs[mat.grammar_index].as_ref() else {
3950 matches.advance();
3951 continue;
3952 };
3953
3954 for capture in mat.captures {
3955 let Some(ix) = config
3956 .objects_by_capture_ix
3957 .binary_search_by_key(&capture.index, |e| e.0)
3958 .ok()
3959 else {
3960 continue;
3961 };
3962 let text_object = config.objects_by_capture_ix[ix].1;
3963 let byte_range = capture.node.byte_range();
3964
3965 let mut found = false;
3966 for (range, existing) in captures.iter_mut() {
3967 if existing == &text_object {
3968 range.start = range.start.min(byte_range.start);
3969 range.end = range.end.max(byte_range.end);
3970 found = true;
3971 break;
3972 }
3973 }
3974
3975 if !found {
3976 captures.push((byte_range, text_object));
3977 }
3978 }
3979
3980 matches.advance();
3981 }
3982 })
3983 }
3984
3985 pub fn text_object_ranges<T: ToOffset>(
3986 &self,
3987 range: Range<T>,
3988 options: TreeSitterOptions,
3989 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3990 let range = range.start.to_offset(self).saturating_sub(1)
3991 ..self.len().min(range.end.to_offset(self) + 1);
3992
3993 let mut matches =
3994 self.syntax
3995 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3996 grammar.text_object_config.as_ref().map(|c| &c.query)
3997 });
3998
3999 let configs = matches
4000 .grammars()
4001 .iter()
4002 .map(|grammar| grammar.text_object_config.as_ref())
4003 .collect::<Vec<_>>();
4004
4005 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4006
4007 iter::from_fn(move || {
4008 loop {
4009 while let Some(capture) = captures.pop() {
4010 if capture.0.overlaps(&range) {
4011 return Some(capture);
4012 }
4013 }
4014
4015 let mat = matches.peek()?;
4016
4017 let Some(config) = configs[mat.grammar_index].as_ref() else {
4018 matches.advance();
4019 continue;
4020 };
4021
4022 for capture in mat.captures {
4023 let Some(ix) = config
4024 .text_objects_by_capture_ix
4025 .binary_search_by_key(&capture.index, |e| e.0)
4026 .ok()
4027 else {
4028 continue;
4029 };
4030 let text_object = config.text_objects_by_capture_ix[ix].1;
4031 let byte_range = capture.node.byte_range();
4032
4033 let mut found = false;
4034 for (range, existing) in captures.iter_mut() {
4035 if existing == &text_object {
4036 range.start = range.start.min(byte_range.start);
4037 range.end = range.end.max(byte_range.end);
4038 found = true;
4039 break;
4040 }
4041 }
4042
4043 if !found {
4044 captures.push((byte_range, text_object));
4045 }
4046 }
4047
4048 matches.advance();
4049 }
4050 })
4051 }
4052
4053 /// Returns enclosing bracket ranges containing the given range
4054 pub fn enclosing_bracket_ranges<T: ToOffset>(
4055 &self,
4056 range: Range<T>,
4057 ) -> impl Iterator<Item = BracketMatch> + '_ {
4058 let range = range.start.to_offset(self)..range.end.to_offset(self);
4059
4060 self.bracket_ranges(range.clone()).filter(move |pair| {
4061 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4062 })
4063 }
4064
/// Returns the smallest enclosing bracket ranges containing the given range,
/// or `None` if no brackets contain the range.
///
/// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
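///
/// # Example
///
/// A sketch (not a doctest); the filter predicate here is purely illustrative:
///
/// ```ignore
/// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
/// if let Some((open, close)) =
///     snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter))
/// {
///     // `open` and `close` are the byte ranges of the innermost bracket pair.
/// }
/// ```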
4068 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4069 &self,
4070 range: Range<T>,
4071 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4072 ) -> Option<(Range<usize>, Range<usize>)> {
4073 let range = range.start.to_offset(self)..range.end.to_offset(self);
4074
4075 // Get the ranges of the innermost pair of brackets.
4076 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4077
4078 for pair in self.enclosing_bracket_ranges(range.clone()) {
4079 if let Some(range_filter) = range_filter
4080 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4081 {
4082 continue;
4083 }
4084
4085 let len = pair.close_range.end - pair.open_range.start;
4086
4087 if let Some((existing_open, existing_close)) = &result {
4088 let existing_len = existing_close.end - existing_open.start;
4089 if len > existing_len {
4090 continue;
4091 }
4092 }
4093
4094 result = Some((pair.open_range, pair.close_range));
4095 }
4096
4097 result
4098 }
4099
/// Returns offset ranges for any matches of the redaction query.
4101 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4102 /// will be run on the relevant section of the buffer.
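///
/// # Example
///
/// A sketch (not a doctest); redaction queries are defined per language
/// (e.g. for environment files):
///
/// ```ignore
/// for range in snapshot.redacted_ranges(0..snapshot.len()) {
///     // Replace `range` with a placeholder before displaying the text.
/// }
/// ```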
4103 pub fn redacted_ranges<T: ToOffset>(
4104 &self,
4105 range: Range<T>,
4106 ) -> impl Iterator<Item = Range<usize>> + '_ {
4107 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4108 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4109 grammar
4110 .redactions_config
4111 .as_ref()
4112 .map(|config| &config.query)
4113 });
4114
4115 let configs = syntax_matches
4116 .grammars()
4117 .iter()
4118 .map(|grammar| grammar.redactions_config.as_ref())
4119 .collect::<Vec<_>>();
4120
4121 iter::from_fn(move || {
4122 let redacted_range = syntax_matches
4123 .peek()
4124 .and_then(|mat| {
4125 configs[mat.grammar_index].and_then(|config| {
4126 mat.captures
4127 .iter()
4128 .find(|capture| capture.index == config.redaction_capture_ix)
4129 })
4130 })
4131 .map(|mat| mat.node.byte_range());
4132 syntax_matches.advance();
4133 redacted_range
4134 })
4135 }
4136
4137 pub fn injections_intersecting_range<T: ToOffset>(
4138 &self,
4139 range: Range<T>,
4140 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4141 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4142
4143 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4144 grammar
4145 .injection_config
4146 .as_ref()
4147 .map(|config| &config.query)
4148 });
4149
4150 let configs = syntax_matches
4151 .grammars()
4152 .iter()
4153 .map(|grammar| grammar.injection_config.as_ref())
4154 .collect::<Vec<_>>();
4155
4156 iter::from_fn(move || {
4157 let ranges = syntax_matches.peek().and_then(|mat| {
4158 let config = &configs[mat.grammar_index]?;
4159 let content_capture_range = mat.captures.iter().find_map(|capture| {
4160 if capture.index == config.content_capture_ix {
4161 Some(capture.node.byte_range())
4162 } else {
4163 None
4164 }
4165 })?;
4166 let language = self.language_at(content_capture_range.start)?;
4167 Some((content_capture_range, language))
4168 });
4169 syntax_matches.advance();
4170 ranges
4171 })
4172 }
4173
4174 pub fn runnable_ranges(
4175 &self,
4176 offset_range: Range<usize>,
4177 ) -> impl Iterator<Item = RunnableRange> + '_ {
4178 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4179 grammar.runnable_config.as_ref().map(|config| &config.query)
4180 });
4181
4182 let test_configs = syntax_matches
4183 .grammars()
4184 .iter()
4185 .map(|grammar| grammar.runnable_config.as_ref())
4186 .collect::<Vec<_>>();
4187
4188 iter::from_fn(move || {
4189 loop {
4190 let mat = syntax_matches.peek()?;
4191
4192 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4193 let mut run_range = None;
4194 let full_range = mat.captures.iter().fold(
4195 Range {
4196 start: usize::MAX,
4197 end: 0,
4198 },
4199 |mut acc, next| {
4200 let byte_range = next.node.byte_range();
4201 if acc.start > byte_range.start {
4202 acc.start = byte_range.start;
4203 }
4204 if acc.end < byte_range.end {
4205 acc.end = byte_range.end;
4206 }
4207 acc
4208 },
4209 );
4210 if full_range.start > full_range.end {
4211 // We did not find a full spanning range of this match.
4212 return None;
4213 }
4214 let extra_captures: SmallVec<[_; 1]> =
4215 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4216 test_configs
4217 .extra_captures
4218 .get(capture.index as usize)
4219 .cloned()
4220 .and_then(|tag_name| match tag_name {
4221 RunnableCapture::Named(name) => {
4222 Some((capture.node.byte_range(), name))
4223 }
4224 RunnableCapture::Run => {
4225 let _ = run_range.insert(capture.node.byte_range());
4226 None
4227 }
4228 })
4229 }));
4230 let run_range = run_range?;
4231 let tags = test_configs
4232 .query
4233 .property_settings(mat.pattern_index)
4234 .iter()
4235 .filter_map(|property| {
4236 if *property.key == *"tag" {
4237 property
4238 .value
4239 .as_ref()
4240 .map(|value| RunnableTag(value.to_string().into()))
4241 } else {
4242 None
4243 }
4244 })
4245 .collect();
4246 let extra_captures = extra_captures
4247 .into_iter()
4248 .map(|(range, name)| {
4249 (
4250 name.to_string(),
4251 self.text_for_range(range.clone()).collect::<String>(),
4252 )
4253 })
4254 .collect();
4255 // All tags should have the same range.
4256 Some(RunnableRange {
4257 run_range,
4258 full_range,
4259 runnable: Runnable {
4260 tags,
4261 language: mat.language,
4262 buffer: self.remote_id(),
4263 },
4264 extra_captures,
4265 buffer_id: self.remote_id(),
4266 })
4267 });
4268
4269 syntax_matches.advance();
4270 if test_range.is_some() {
// It's fine to short-circuit when .peek()? returns None. But we don't want this iterator to end just
// because a capture lacked a run marker, so in that case we loop around to the next capture.
4273 return test_range;
4274 }
4275 }
4276 })
4277 }
4278
/// Returns selections for remote peers intersecting the given range,
/// optionally including the local replica's selections as well.
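///
/// # Example
///
/// A sketch (not a doctest) that walks every collaborator's selections:
///
/// ```ignore
/// for (replica_id, line_mode, cursor_shape, selections) in
///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
/// {
///     for selection in selections {
///         // `selection.head()` is an `Anchor` into this buffer.
///     }
/// }
/// ```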
4280 #[allow(clippy::type_complexity)]
4281 pub fn selections_in_range(
4282 &self,
4283 range: Range<Anchor>,
4284 include_local: bool,
4285 ) -> impl Iterator<
4286 Item = (
4287 ReplicaId,
4288 bool,
4289 CursorShape,
4290 impl Iterator<Item = &Selection<Anchor>> + '_,
4291 ),
4292 > + '_ {
4293 self.remote_selections
4294 .iter()
4295 .filter(move |(replica_id, set)| {
4296 (include_local || **replica_id != self.text.replica_id())
4297 && !set.selections.is_empty()
4298 })
4299 .map(move |(replica_id, set)| {
4300 let start_ix = match set.selections.binary_search_by(|probe| {
4301 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4302 }) {
4303 Ok(ix) | Err(ix) => ix,
4304 };
4305 let end_ix = match set.selections.binary_search_by(|probe| {
4306 probe.start.cmp(&range.end, self).then(Ordering::Less)
4307 }) {
4308 Ok(ix) | Err(ix) => ix,
4309 };
4310
4311 (
4312 *replica_id,
4313 set.line_mode,
4314 set.cursor_shape,
4315 set.selections[start_ix..end_ix].iter(),
4316 )
4317 })
4318 }
4319
/// Returns whether the buffer contains any diagnostics.
4321 pub fn has_diagnostics(&self) -> bool {
4322 !self.diagnostics.is_empty()
4323 }
4324
4325 /// Returns all the diagnostics intersecting the given range.
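///
/// # Example
///
/// A sketch (not a doctest) that collects all error-severity diagnostics,
/// resolved to offset ranges:
///
/// ```ignore
/// let errors: Vec<_> = snapshot
///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
///     .collect();
/// ```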
4326 pub fn diagnostics_in_range<'a, T, O>(
4327 &'a self,
4328 search_range: Range<T>,
4329 reversed: bool,
4330 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4331 where
4332 T: 'a + Clone + ToOffset,
4333 O: 'a + FromAnchor,
4334 {
4335 let mut iterators: Vec<_> = self
4336 .diagnostics
4337 .iter()
4338 .map(|(_, collection)| {
4339 collection
4340 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4341 .peekable()
4342 })
4343 .collect();
4344
4345 std::iter::from_fn(move || {
4346 let (next_ix, _) = iterators
4347 .iter_mut()
4348 .enumerate()
4349 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4350 .min_by(|(_, a), (_, b)| {
4351 let cmp = a
4352 .range
4353 .start
4354 .cmp(&b.range.start, self)
4355 // when range is equal, sort by diagnostic severity
4356 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4357 // and stabilize order with group_id
4358 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4359 if reversed { cmp.reverse() } else { cmp }
4360 })?;
4361 iterators[next_ix]
4362 .next()
4363 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4364 diagnostic,
4365 range: FromAnchor::from_anchor(&range.start, self)
4366 ..FromAnchor::from_anchor(&range.end, self),
4367 })
4368 })
4369 }
4370
4371 /// Returns all the diagnostic groups associated with the given
4372 /// language server ID. If no language server ID is provided,
/// all diagnostic groups are returned.
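///
/// # Example
///
/// A sketch (not a doctest) that prints each group's primary diagnostic:
///
/// ```ignore
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     let primary = &group.entries[group.primary_ix];
///     println!("{:?}: {}", server_id, primary.diagnostic.message);
/// }
/// ```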
4374 pub fn diagnostic_groups(
4375 &self,
4376 language_server_id: Option<LanguageServerId>,
4377 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4378 let mut groups = Vec::new();
4379
4380 if let Some(language_server_id) = language_server_id {
4381 if let Ok(ix) = self
4382 .diagnostics
4383 .binary_search_by_key(&language_server_id, |e| e.0)
4384 {
4385 self.diagnostics[ix]
4386 .1
4387 .groups(language_server_id, &mut groups, self);
4388 }
4389 } else {
4390 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4391 diagnostics.groups(*language_server_id, &mut groups, self);
4392 }
4393 }
4394
4395 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4396 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4397 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4398 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4399 });
4400
4401 groups
4402 }
4403
4404 /// Returns an iterator over the diagnostics for the given group.
4405 pub fn diagnostic_group<O>(
4406 &self,
4407 group_id: usize,
4408 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4409 where
4410 O: FromAnchor + 'static,
4411 {
4412 self.diagnostics
4413 .iter()
4414 .flat_map(move |(_, set)| set.group(group_id, self))
4415 }
4416
4417 /// An integer version number that accounts for all updates besides
4418 /// the buffer's text itself (which is versioned via a version vector).
4419 pub fn non_text_state_update_count(&self) -> usize {
4420 self.non_text_state_update_count
4421 }
4422
4423 /// An integer version that changes when the buffer's syntax changes.
4424 pub fn syntax_update_count(&self) -> usize {
4425 self.syntax.update_count()
4426 }
4427
/// Returns a snapshot of the underlying file.
4429 pub fn file(&self) -> Option<&Arc<dyn File>> {
4430 self.file.as_ref()
4431 }
4432
/// Resolves the path of the underlying file: the worktree-relative path by default, or the
/// full path (including the worktree root name) when `include_root` is true or the file is the worktree root itself.
4434 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4435 if let Some(file) = self.file() {
4436 if file.path().file_name().is_none() || include_root {
4437 Some(file.full_path(cx))
4438 } else {
4439 Some(file.path().to_path_buf())
4440 }
4441 } else {
4442 None
4443 }
4444 }
4445
4446 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4447 let query_str = query.fuzzy_contents;
4448 if query_str.is_some_and(|query| query.is_empty()) {
4449 return BTreeMap::default();
4450 }
4451
4452 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4453 language,
4454 override_id: None,
4455 }));
4456
4457 let mut query_ix = 0;
4458 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4459 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4460
4461 let mut words = BTreeMap::default();
4462 let mut current_word_start_ix = None;
4463 let mut chunk_ix = query.range.start;
4464 for chunk in self.chunks(query.range, false) {
4465 for (i, c) in chunk.text.char_indices() {
4466 let ix = chunk_ix + i;
4467 if classifier.is_word(c) {
4468 if current_word_start_ix.is_none() {
4469 current_word_start_ix = Some(ix);
4470 }
4471
4472 if let Some(query_chars) = &query_chars
4473 && query_ix < query_len
4474 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4475 {
4476 query_ix += 1;
4477 }
4478 continue;
4479 } else if let Some(word_start) = current_word_start_ix.take()
4480 && query_ix == query_len
4481 {
4482 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4483 let mut word_text = self.text_for_range(word_start..ix).peekable();
4484 let first_char = word_text
4485 .peek()
4486 .and_then(|first_chunk| first_chunk.chars().next());
// As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
4488 if !query.skip_digits
4489 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4490 {
4491 words.insert(word_text.collect(), word_range);
4492 }
4493 }
4494 query_ix = 0;
4495 }
4496 chunk_ix += chunk.text.len();
4497 }
4498
4499 words
4500 }
4501}
4502
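/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// # Example
///
/// A sketch (not a doctest) of collecting completion candidates whose characters
/// fuzzily match "fo", from the entire buffer:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("fo"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```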
4503pub struct WordsQuery<'a> {
/// Only return words containing every character of this string, matched in order, case-insensitively.
4505 pub fuzzy_contents: Option<&'a str>,
4506 /// Skips words that start with a digit.
4507 pub skip_digits: bool,
/// The buffer offset range to search for words.
4509 pub range: Range<usize>,
4510}
4511
4512fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4513 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4514}
4515
4516fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4517 let mut result = IndentSize::spaces(0);
4518 for c in text {
4519 let kind = match c {
4520 ' ' => IndentKind::Space,
4521 '\t' => IndentKind::Tab,
4522 _ => break,
4523 };
4524 if result.len == 0 {
4525 result.kind = kind;
4526 }
4527 result.len += 1;
4528 }
4529 result
4530}
4531
4532impl Clone for BufferSnapshot {
4533 fn clone(&self) -> Self {
4534 Self {
4535 text: self.text.clone(),
4536 syntax: self.syntax.clone(),
4537 file: self.file.clone(),
4538 remote_selections: self.remote_selections.clone(),
4539 diagnostics: self.diagnostics.clone(),
4540 language: self.language.clone(),
4541 non_text_state_update_count: self.non_text_state_update_count,
4542 }
4543 }
4544}
4545
4546impl Deref for BufferSnapshot {
4547 type Target = text::BufferSnapshot;
4548
4549 fn deref(&self) -> &Self::Target {
4550 &self.text
4551 }
4552}
4553
4554unsafe impl Send for BufferChunks<'_> {}
4555
4556impl<'a> BufferChunks<'a> {
4557 pub(crate) fn new(
4558 text: &'a Rope,
4559 range: Range<usize>,
4560 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4561 diagnostics: bool,
4562 buffer_snapshot: Option<&'a BufferSnapshot>,
4563 ) -> Self {
4564 let mut highlights = None;
4565 if let Some((captures, highlight_maps)) = syntax {
4566 highlights = Some(BufferChunkHighlights {
4567 captures,
4568 next_capture: None,
4569 stack: Default::default(),
4570 highlight_maps,
4571 })
4572 }
4573
4574 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4575 let chunks = text.chunks_in_range(range.clone());
4576
4577 let mut this = BufferChunks {
4578 range,
4579 buffer_snapshot,
4580 chunks,
4581 diagnostic_endpoints,
4582 error_depth: 0,
4583 warning_depth: 0,
4584 information_depth: 0,
4585 hint_depth: 0,
4586 unnecessary_depth: 0,
4587 underline: true,
4588 highlights,
4589 };
4590 this.initialize_diagnostic_endpoints();
4591 this
4592 }
4593
/// Seeks to the given byte range in the buffer.
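///
/// # Example
///
/// A sketch (not a doctest) of reusing one highlighted chunk iterator across
/// several sub-ranges; `snapshot` is assumed to be a `BufferSnapshot`:
///
/// ```ignore
/// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
/// chunks.seek(10..20);
/// let text: String = chunks.map(|chunk| chunk.text).collect();
/// assert_eq!(text, snapshot.text_for_range(10..20).collect::<String>());
/// ```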
4595 pub fn seek(&mut self, range: Range<usize>) {
4596 let old_range = std::mem::replace(&mut self.range, range.clone());
4597 self.chunks.set_range(self.range.clone());
4598 if let Some(highlights) = self.highlights.as_mut() {
4599 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4600 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4601 highlights
4602 .stack
4603 .retain(|(end_offset, _)| *end_offset > range.start);
4604 if let Some(capture) = &highlights.next_capture
4605 && range.start >= capture.node.start_byte()
4606 {
4607 let next_capture_end = capture.node.end_byte();
4608 if range.start < next_capture_end {
4609 highlights.stack.push((
4610 next_capture_end,
4611 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4612 ));
4613 }
4614 highlights.next_capture.take();
4615 }
4616 } else if let Some(snapshot) = self.buffer_snapshot {
4617 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4618 *highlights = BufferChunkHighlights {
4619 captures,
4620 next_capture: None,
4621 stack: Default::default(),
4622 highlight_maps,
4623 };
4624 } else {
4625 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4626 // Seeking such BufferChunks is not supported.
4627 debug_assert!(
4628 false,
4629 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4630 );
4631 }
4632
4633 highlights.captures.set_byte_range(self.range.clone());
4634 self.initialize_diagnostic_endpoints();
4635 }
4636 }
4637
4638 fn initialize_diagnostic_endpoints(&mut self) {
4639 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4640 && let Some(buffer) = self.buffer_snapshot
4641 {
4642 let mut diagnostic_endpoints = Vec::new();
4643 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4644 diagnostic_endpoints.push(DiagnosticEndpoint {
4645 offset: entry.range.start,
4646 is_start: true,
4647 severity: entry.diagnostic.severity,
4648 is_unnecessary: entry.diagnostic.is_unnecessary,
4649 underline: entry.diagnostic.underline,
4650 });
4651 diagnostic_endpoints.push(DiagnosticEndpoint {
4652 offset: entry.range.end,
4653 is_start: false,
4654 severity: entry.diagnostic.severity,
4655 is_unnecessary: entry.diagnostic.is_unnecessary,
4656 underline: entry.diagnostic.underline,
4657 });
4658 }
4659 diagnostic_endpoints
4660 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4661 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4662 self.hint_depth = 0;
4663 self.error_depth = 0;
4664 self.warning_depth = 0;
4665 self.information_depth = 0;
4666 }
4667 }
4668
4669 /// The current byte offset in the buffer.
4670 pub fn offset(&self) -> usize {
4671 self.range.start
4672 }
4673
4674 pub fn range(&self) -> Range<usize> {
4675 self.range.clone()
4676 }
4677
4678 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4679 let depth = match endpoint.severity {
4680 DiagnosticSeverity::ERROR => &mut self.error_depth,
4681 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4682 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4683 DiagnosticSeverity::HINT => &mut self.hint_depth,
4684 _ => return,
4685 };
4686 if endpoint.is_start {
4687 *depth += 1;
4688 } else {
4689 *depth -= 1;
4690 }
4691
4692 if endpoint.is_unnecessary {
4693 if endpoint.is_start {
4694 self.unnecessary_depth += 1;
4695 } else {
4696 self.unnecessary_depth -= 1;
4697 }
4698 }
4699 }
4700
4701 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4702 if self.error_depth > 0 {
4703 Some(DiagnosticSeverity::ERROR)
4704 } else if self.warning_depth > 0 {
4705 Some(DiagnosticSeverity::WARNING)
4706 } else if self.information_depth > 0 {
4707 Some(DiagnosticSeverity::INFORMATION)
4708 } else if self.hint_depth > 0 {
4709 Some(DiagnosticSeverity::HINT)
4710 } else {
4711 None
4712 }
4713 }
4714
4715 fn current_code_is_unnecessary(&self) -> bool {
4716 self.unnecessary_depth > 0
4717 }
4718}
4719
4720impl<'a> Iterator for BufferChunks<'a> {
4721 type Item = Chunk<'a>;
4722
4723 fn next(&mut self) -> Option<Self::Item> {
4724 let mut next_capture_start = usize::MAX;
4725 let mut next_diagnostic_endpoint = usize::MAX;
4726
4727 if let Some(highlights) = self.highlights.as_mut() {
4728 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4729 if *parent_capture_end <= self.range.start {
4730 highlights.stack.pop();
4731 } else {
4732 break;
4733 }
4734 }
4735
4736 if highlights.next_capture.is_none() {
4737 highlights.next_capture = highlights.captures.next();
4738 }
4739
4740 while let Some(capture) = highlights.next_capture.as_ref() {
4741 if self.range.start < capture.node.start_byte() {
4742 next_capture_start = capture.node.start_byte();
4743 break;
4744 } else {
4745 let highlight_id =
4746 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4747 highlights
4748 .stack
4749 .push((capture.node.end_byte(), highlight_id));
4750 highlights.next_capture = highlights.captures.next();
4751 }
4752 }
4753 }
4754
4755 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4756 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4757 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4758 if endpoint.offset <= self.range.start {
4759 self.update_diagnostic_depths(endpoint);
4760 diagnostic_endpoints.next();
4761 self.underline = endpoint.underline;
4762 } else {
4763 next_diagnostic_endpoint = endpoint.offset;
4764 break;
4765 }
4766 }
4767 }
4768 self.diagnostic_endpoints = diagnostic_endpoints;
4769
4770 if let Some(chunk) = self.chunks.peek() {
4771 let chunk_start = self.range.start;
4772 let mut chunk_end = (self.chunks.offset() + chunk.len())
4773 .min(next_capture_start)
4774 .min(next_diagnostic_endpoint);
4775 let mut highlight_id = None;
4776 if let Some(highlights) = self.highlights.as_ref()
4777 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4778 {
4779 chunk_end = chunk_end.min(*parent_capture_end);
4780 highlight_id = Some(*parent_highlight_id);
4781 }
4782
4783 let slice =
4784 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4785 self.range.start = chunk_end;
4786 if self.range.start == self.chunks.offset() + chunk.len() {
4787 self.chunks.next().unwrap();
4788 }
4789
4790 Some(Chunk {
4791 text: slice,
4792 syntax_highlight_id: highlight_id,
4793 underline: self.underline,
4794 diagnostic_severity: self.current_diagnostic_severity(),
4795 is_unnecessary: self.current_code_is_unnecessary(),
4796 ..Chunk::default()
4797 })
4798 } else {
4799 None
4800 }
4801 }
4802}
4803
4804impl operation_queue::Operation for Operation {
4805 fn lamport_timestamp(&self) -> clock::Lamport {
4806 match self {
4807 Operation::Buffer(_) => {
4808 unreachable!("buffer operations should never be deferred at this layer")
4809 }
4810 Operation::UpdateDiagnostics {
4811 lamport_timestamp, ..
4812 }
4813 | Operation::UpdateSelections {
4814 lamport_timestamp, ..
4815 }
4816 | Operation::UpdateCompletionTriggers {
4817 lamport_timestamp, ..
4818 } => *lamport_timestamp,
4819 }
4820 }
4821}
4822
4823impl Default for Diagnostic {
4824 fn default() -> Self {
4825 Self {
4826 source: Default::default(),
4827 source_kind: DiagnosticSourceKind::Other,
4828 code: None,
4829 code_description: None,
4830 severity: DiagnosticSeverity::ERROR,
4831 message: Default::default(),
4832 markdown: None,
4833 group_id: 0,
4834 is_primary: false,
4835 is_disk_based: false,
4836 is_unnecessary: false,
4837 underline: true,
4838 data: None,
4839 }
4840 }
4841}
4842
4843impl IndentSize {
/// Returns an [`IndentSize`] representing the given number of spaces.
4845 pub fn spaces(len: u32) -> Self {
4846 Self {
4847 len,
4848 kind: IndentKind::Space,
4849 }
4850 }
4851
4852 /// Returns an [`IndentSize`] representing a tab.
4853 pub fn tab() -> Self {
4854 Self {
4855 len: 1,
4856 kind: IndentKind::Tab,
4857 }
4858 }
4859
4860 /// An iterator over the characters represented by this [`IndentSize`].
4861 pub fn chars(&self) -> impl Iterator<Item = char> {
4862 iter::repeat(self.char()).take(self.len as usize)
4863 }
4864
4865 /// The character representation of this [`IndentSize`].
4866 pub fn char(&self) -> char {
4867 match self.kind {
4868 IndentKind::Space => ' ',
4869 IndentKind::Tab => '\t',
4870 }
4871 }
4872
4873 /// Consumes the current [`IndentSize`] and returns a new one that has
4874 /// been shrunk or enlarged by the given size along the given direction.
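///
/// # Example
///
/// A sketch (not a doctest):
///
/// ```ignore
/// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
/// assert_eq!(indent.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 8);
/// ```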
4875 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4876 match direction {
4877 Ordering::Less => {
4878 if self.kind == size.kind && self.len >= size.len {
4879 self.len -= size.len;
4880 }
4881 }
4882 Ordering::Equal => {}
4883 Ordering::Greater => {
4884 if self.len == 0 {
4885 self = size;
4886 } else if self.kind == size.kind {
4887 self.len += size.len;
4888 }
4889 }
4890 }
4891 self
4892 }
4893
4894 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4895 match self.kind {
4896 IndentKind::Space => self.len as usize,
4897 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4898 }
4899 }
4900}
4901
4902#[cfg(any(test, feature = "test-support"))]
4903pub struct TestFile {
4904 pub path: Arc<Path>,
4905 pub root_name: String,
4906 pub local_root: Option<PathBuf>,
4907}
4908
4909#[cfg(any(test, feature = "test-support"))]
4910impl File for TestFile {
4911 fn path(&self) -> &Arc<Path> {
4912 &self.path
4913 }
4914
4915 fn full_path(&self, _: &gpui::App) -> PathBuf {
4916 PathBuf::from(&self.root_name).join(self.path.as_ref())
4917 }
4918
4919 fn as_local(&self) -> Option<&dyn LocalFile> {
4920 if self.local_root.is_some() {
4921 Some(self)
4922 } else {
4923 None
4924 }
4925 }
4926
4927 fn disk_state(&self) -> DiskState {
4928 unimplemented!()
4929 }
4930
4931 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4932 self.path().file_name().unwrap_or(self.root_name.as_ref())
4933 }
4934
4935 fn worktree_id(&self, _: &App) -> WorktreeId {
4936 WorktreeId::from_usize(0)
4937 }
4938
4939 fn to_proto(&self, _: &App) -> rpc::proto::File {
4940 unimplemented!()
4941 }
4942
4943 fn is_private(&self) -> bool {
4944 false
4945 }
4946}
4947
4948#[cfg(any(test, feature = "test-support"))]
4949impl LocalFile for TestFile {
4950 fn abs_path(&self, _cx: &App) -> PathBuf {
4951 PathBuf::from(self.local_root.as_ref().unwrap())
4952 .join(&self.root_name)
4953 .join(self.path.as_ref())
4954 }
4955
4956 fn load(&self, _cx: &App) -> Task<Result<String>> {
4957 unimplemented!()
4958 }
4959
4960 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4961 unimplemented!()
4962 }
4963}
4964
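/// Groups consecutive values from the iterator into half-open ranges, starting a
/// new range whenever the sequence breaks or a range reaches `max_len`.
///
/// # Example
///
/// A sketch (not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```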
4965pub(crate) fn contiguous_ranges(
4966 values: impl Iterator<Item = u32>,
4967 max_len: usize,
4968) -> impl Iterator<Item = Range<u32>> {
4969 let mut values = values;
4970 let mut current_range: Option<Range<u32>> = None;
4971 std::iter::from_fn(move || {
4972 loop {
4973 if let Some(value) = values.next() {
4974 if let Some(range) = &mut current_range
4975 && value == range.end
4976 && range.len() < max_len
4977 {
4978 range.end += 1;
4979 continue;
4980 }
4981
4982 let prev_range = current_range.clone();
4983 current_range = Some(value..(value + 1));
4984 if prev_range.is_some() {
4985 return prev_range;
4986 }
4987 } else {
4988 return current_range.take();
4989 }
4990 }
4991 })
4992}
4993
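/// Classifies characters as words, whitespace, or punctuation, optionally honoring
/// the word-character overrides of a [`LanguageScope`].
///
/// # Example
///
/// A sketch (not a doctest), with no language scope attached:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```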
4994#[derive(Default, Debug)]
4995pub struct CharClassifier {
4996 scope: Option<LanguageScope>,
4997 for_completion: bool,
4998 ignore_punctuation: bool,
4999}
5000
5001impl CharClassifier {
5002 pub fn new(scope: Option<LanguageScope>) -> Self {
5003 Self {
5004 scope,
5005 for_completion: false,
5006 ignore_punctuation: false,
5007 }
5008 }
5009
5010 pub fn for_completion(self, for_completion: bool) -> Self {
5011 Self {
5012 for_completion,
5013 ..self
5014 }
5015 }
5016
5017 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5018 Self {
5019 ignore_punctuation,
5020 ..self
5021 }
5022 }
5023
5024 pub fn is_whitespace(&self, c: char) -> bool {
5025 self.kind(c) == CharKind::Whitespace
5026 }
5027
5028 pub fn is_word(&self, c: char) -> bool {
5029 self.kind(c) == CharKind::Word
5030 }
5031
5032 pub fn is_punctuation(&self, c: char) -> bool {
5033 self.kind(c) == CharKind::Punctuation
5034 }
5035
5036 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5037 if c.is_alphanumeric() || c == '_' {
5038 return CharKind::Word;
5039 }
5040
5041 if let Some(scope) = &self.scope {
5042 let characters = if self.for_completion {
5043 scope.completion_query_characters()
5044 } else {
5045 scope.word_characters()
5046 };
5047 if let Some(characters) = characters
5048 && characters.contains(&c)
5049 {
5050 return CharKind::Word;
5051 }
5052 }
5053
5054 if c.is_whitespace() {
5055 return CharKind::Whitespace;
5056 }
5057
5058 if ignore_punctuation {
5059 CharKind::Word
5060 } else {
5061 CharKind::Punctuation
5062 }
5063 }
5064
5065 pub fn kind(&self, c: char) -> CharKind {
5066 self.kind_with(c, self.ignore_punctuation)
5067 }
5068}
5069
5070/// Find all of the ranges of whitespace that occur at the ends of lines
5071/// in the given rope.
5072///
5073/// This could also be done with a regex search, but this implementation
5074/// avoids copying text.
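///
/// # Example
///
/// A sketch (not a doctest), assuming `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```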
5075pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5076 let mut ranges = Vec::new();
5077
5078 let mut offset = 0;
5079 let mut prev_chunk_trailing_whitespace_range = 0..0;
5080 for chunk in rope.chunks() {
5081 let mut prev_line_trailing_whitespace_range = 0..0;
5082 for (i, line) in chunk.split('\n').enumerate() {
5083 let line_end_offset = offset + line.len();
5084 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5085 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5086
5087 if i == 0 && trimmed_line_len == 0 {
5088 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5089 }
5090 if !prev_line_trailing_whitespace_range.is_empty() {
5091 ranges.push(prev_line_trailing_whitespace_range);
5092 }
5093
5094 offset = line_end_offset + 1;
5095 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5096 }
5097
5098 offset -= 1;
5099 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5100 }
5101
5102 if !prev_chunk_trailing_whitespace_range.is_empty() {
5103 ranges.push(prev_chunk_trailing_whitespace_range);
5104 }
5105
5106 ranges
5107}