1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
    /// A URL with more information about this diagnostic's code.
    pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// A coarse categorization of this diagnostic by the kind of source that produced it.
233 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
/// Indicates how a diagnostic was produced.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    /// A diagnostic pulled from the language server via a document diagnostics request.
    Pulled,
    /// A diagnostic pushed by the language server via a publish-diagnostics notification.
    Pushed,
    /// A diagnostic that originated from some other source.
    Other,
}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer needs to be reloaded from disk.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
    /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a` columns.
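        ///
        /// For example (illustrative numbers): if the copied block's first line
        /// originally sat at indent column 4 (`a = 4`) and is auto-indented to
        /// column 8 (`b = 8`), every subsequent line is shifted right by
        /// `b - a = 4` columns, preserving the block's internal indentation.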
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text is part of an inlay (virtual text inserted by the editor rather than stored in the buffer).
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
/// A runnable is a set of data about a region of a buffer that can be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line of the text, with leading whitespace trimmed unless a
    /// highlight begins within it, along with a boolean indicating whether more lines follow.
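    ///
    /// A minimal sketch of the expected behavior, assuming no highlights (not compiled as a doctest):
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "  let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more); // a second line follows
    /// ```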
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
    /// Builds a [`HighlightedText`] for the region touched by `edits`, marking
    /// insertions (and, when `include_deletions` is true, the deleted text from
    /// `current_snapshot`) with the theme's created/deleted background colors.
    pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
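    ///
    /// A minimal sketch of constructing a buffer entity (not compiled as a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```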
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let language_registry = language_registry.clone();
978 syntax.reparse(&text, language_registry, language);
979 }
980 BufferSnapshot {
981 text,
982 syntax,
983 file: None,
984 diagnostics: Default::default(),
985 remote_selections: Default::default(),
986 language,
987 non_text_state_update_count: 0,
988 }
989 }
990 }
991
992 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
993 let entity_id = cx.reserve_entity::<Self>().entity_id();
994 let buffer_id = entity_id.as_non_zero_u64().into();
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
997 let syntax = SyntaxMap::new(&text).snapshot();
998 BufferSnapshot {
999 text,
1000 syntax,
1001 file: None,
1002 diagnostics: Default::default(),
1003 remote_selections: Default::default(),
1004 language: None,
1005 non_text_state_update_count: 0,
1006 }
1007 }
1008
1009 #[cfg(any(test, feature = "test-support"))]
1010 pub fn build_snapshot_sync(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> BufferSnapshot {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1019 let mut syntax = SyntaxMap::new(&text).snapshot();
1020 if let Some(language) = language.clone() {
1021 syntax.reparse(&text, language_registry, language);
1022 }
1023 BufferSnapshot {
1024 text,
1025 syntax,
1026 file: None,
1027 diagnostics: Default::default(),
1028 remote_selections: Default::default(),
1029 language,
1030 non_text_state_update_count: 0,
1031 }
1032 }
1033
1034 /// Retrieve a snapshot of the buffer's current state. This is computationally
1035 /// cheap, and allows reading from the buffer on a background thread.
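    ///
    /// A minimal sketch of reading from a snapshot off the main thread (not compiled as a doctest):
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely without blocking further edits.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```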
1036 pub fn snapshot(&self) -> BufferSnapshot {
1037 let text = self.text.snapshot();
1038 let mut syntax_map = self.syntax_map.lock();
1039 syntax_map.interpolate(&text);
1040 let syntax = syntax_map.snapshot();
1041
1042 BufferSnapshot {
1043 text,
1044 syntax,
1045 file: self.file.clone(),
1046 remote_selections: self.remote_selections.clone(),
1047 diagnostics: self.diagnostics.clone(),
1048 language: self.language.clone(),
1049 non_text_state_update_count: self.non_text_state_update_count,
1050 }
1051 }
1052
    /// Creates a branch of this buffer: a new buffer that starts from this
    /// buffer's current state and whose edits can later be merged back via
    /// [`Self::merge_into_base`].
    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1054 let this = cx.entity();
1055 cx.new(|cx| {
1056 let mut branch = Self {
1057 branch_state: Some(BufferBranchState {
1058 base_buffer: this.clone(),
1059 merged_operations: Default::default(),
1060 }),
1061 language: self.language.clone(),
1062 has_conflict: self.has_conflict,
1063 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1064 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1065 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1066 };
1067 if let Some(language_registry) = self.language_registry() {
1068 branch.set_language_registry(language_registry);
1069 }
1070
1071 // Reparse the branch buffer so that we get syntax highlighting immediately.
1072 branch.reparse(cx);
1073
1074 branch
1075 })
1076 }
1077
1078 pub fn preview_edits(
1079 &self,
1080 edits: Arc<[(Range<Anchor>, String)]>,
1081 cx: &App,
1082 ) -> Task<EditPreview> {
1083 let registry = self.language_registry();
1084 let language = self.language().cloned();
1085 let old_snapshot = self.text.snapshot();
1086 let mut branch_buffer = self.text.branch();
1087 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1088 cx.background_spawn(async move {
1089 if !edits.is_empty() {
1090 if let Some(language) = language.clone() {
1091 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1092 }
1093
1094 branch_buffer.edit(edits.iter().cloned());
1095 let snapshot = branch_buffer.snapshot();
1096 syntax_snapshot.interpolate(&snapshot);
1097
1098 if let Some(language) = language {
1099 syntax_snapshot.reparse(&snapshot, registry, language);
1100 }
1101 }
1102 EditPreview {
1103 old_snapshot,
1104 applied_edits_snapshot: branch_buffer.snapshot(),
1105 syntax_snapshot,
1106 }
1107 })
1108 }
1109
1110 /// Applies all of the changes in this buffer that intersect any of the
1111 /// given `ranges` to its base buffer.
1112 ///
1113 /// If `ranges` is empty, then all changes will be applied. This buffer must
1114 /// be a branch buffer to call this method.
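    ///
    /// A minimal sketch (hypothetical byte ranges; not compiled as a doctest):
    /// ```ignore
    /// // Merge only the branch edits that intersect bytes 0..10 into the base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(vec![0..10], cx));
    /// ```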
1115 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1116 let Some(base_buffer) = self.base_buffer() else {
1117 debug_panic!("not a branch buffer");
1118 return;
1119 };
1120
1121 let mut ranges = if ranges.is_empty() {
1122 &[0..usize::MAX]
1123 } else {
1124 ranges.as_slice()
1125 }
1126 .iter()
1127 .peekable();
1128
1129 let mut edits = Vec::new();
1130 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1131 let mut is_included = false;
1132 while let Some(range) = ranges.peek() {
1133 if range.end < edit.new.start {
1134 ranges.next().unwrap();
1135 } else {
1136 if range.start <= edit.new.end {
1137 is_included = true;
1138 }
1139 break;
1140 }
1141 }
1142
1143 if is_included {
1144 edits.push((
1145 edit.old.clone(),
1146 self.text_for_range(edit.new.clone()).collect::<String>(),
1147 ));
1148 }
1149 }
1150
1151 let operation = base_buffer.update(cx, |base_buffer, cx| {
1152 // cx.emit(BufferEvent::DiffBaseChanged);
1153 base_buffer.edit(edits, None, cx)
1154 });
1155
1156 if let Some(operation) = operation
1157 && let Some(BufferBranchState {
1158 merged_operations, ..
1159 }) = &mut self.branch_state
1160 {
1161 merged_operations.push(operation);
1162 }
1163 }
1164
1165 fn on_base_buffer_event(
1166 &mut self,
1167 _: Entity<Buffer>,
1168 event: &BufferEvent,
1169 cx: &mut Context<Self>,
1170 ) {
1171 let BufferEvent::Operation { operation, .. } = event else {
1172 return;
1173 };
1174 let Some(BufferBranchState {
1175 merged_operations, ..
1176 }) = &mut self.branch_state
1177 else {
1178 return;
1179 };
1180
1181 let mut operation_to_undo = None;
1182 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1183 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1184 {
1185 merged_operations.remove(ix);
1186 operation_to_undo = Some(operation.timestamp);
1187 }
1188
1189 self.apply_ops([operation.clone()], cx);
1190
1191 if let Some(timestamp) = operation_to_undo {
1192 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1193 self.undo_operations(counts, cx);
1194 }
1195 }
1196
1197 #[cfg(test)]
1198 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1199 &self.text
1200 }
1201
1202 /// Retrieve a snapshot of the buffer's raw text, without any
1203 /// language-related state like the syntax tree or diagnostics.
1204 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1205 self.text.snapshot()
1206 }
1207
1208 /// The file associated with the buffer, if any.
1209 pub fn file(&self) -> Option<&Arc<dyn File>> {
1210 self.file.as_ref()
1211 }
1212
1213 /// The version of the buffer that was last saved or reloaded from disk.
1214 pub fn saved_version(&self) -> &clock::Global {
1215 &self.saved_version
1216 }
1217
1218 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1219 pub fn saved_mtime(&self) -> Option<MTime> {
1220 self.saved_mtime
1221 }
1222
1223 /// Assign a language to the buffer.
1224 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1225 self.non_text_state_update_count += 1;
1226 self.syntax_map.lock().clear(&self.text);
1227 self.language = language;
1228 self.was_changed();
1229 self.reparse(cx);
1230 cx.emit(BufferEvent::LanguageChanged);
1231 }
1232
1233 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1234 /// other languages if parts of the buffer are written in different languages.
1235 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1236 self.syntax_map
1237 .lock()
1238 .set_language_registry(language_registry);
1239 }
1240
1241 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1242 self.syntax_map.lock().language_registry()
1243 }
1244
1245 /// Assign the buffer a new [`Capability`].
1246 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1247 self.capability = capability;
1248 cx.emit(BufferEvent::CapabilityChanged)
1249 }
1250
1251 /// This method is called to signal that the buffer has been saved.
1252 pub fn did_save(
1253 &mut self,
1254 version: clock::Global,
1255 mtime: Option<MTime>,
1256 cx: &mut Context<Self>,
1257 ) {
1258 self.saved_version = version;
1259 self.has_unsaved_edits
1260 .set((self.saved_version().clone(), false));
1261 self.has_conflict = false;
1262 self.saved_mtime = mtime;
1263 self.was_changed();
1264 cx.emit(BufferEvent::Saved);
1265 cx.notify();
1266 }
1267
1268 /// This method is called to signal that the buffer has been discarded.
1269 pub fn discarded(&self, cx: &mut Context<Self>) {
1270 cx.emit(BufferEvent::Discarded);
1271 cx.notify();
1272 }
1273
1274 /// Reloads the contents of the buffer from disk.
1275 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1276 let (tx, rx) = futures::channel::oneshot::channel();
1277 let prev_version = self.text.version();
1278 self.reload_task = Some(cx.spawn(async move |this, cx| {
1279 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1280 let file = this.file.as_ref()?.as_local()?;
1281
1282 Some((file.disk_state().mtime(), file.load(cx)))
1283 })?
1284 else {
1285 return Ok(());
1286 };
1287
1288 let new_text = new_text.await?;
1289 let diff = this
1290 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1291 .await;
1292 this.update(cx, |this, cx| {
1293 if this.version() == diff.base_version {
1294 this.finalize_last_transaction();
1295 this.apply_diff(diff, cx);
1296 tx.send(this.finalize_last_transaction().cloned()).ok();
1297 this.has_conflict = false;
1298 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1299 } else {
1300 if !diff.edits.is_empty()
1301 || this
1302 .edits_since::<usize>(&diff.base_version)
1303 .next()
1304 .is_some()
1305 {
1306 this.has_conflict = true;
1307 }
1308
1309 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1310 }
1311
1312 this.reload_task.take();
1313 })
1314 }));
1315 rx
1316 }
1317
1318 /// This method is called to signal that the buffer has been reloaded.
1319 pub fn did_reload(
1320 &mut self,
1321 version: clock::Global,
1322 line_ending: LineEnding,
1323 mtime: Option<MTime>,
1324 cx: &mut Context<Self>,
1325 ) {
1326 self.saved_version = version;
1327 self.has_unsaved_edits
1328 .set((self.saved_version.clone(), false));
1329 self.text.set_line_ending(line_ending);
1330 self.saved_mtime = mtime;
1331 cx.emit(BufferEvent::Reloaded);
1332 cx.notify();
1333 }
1334
1335 /// Updates the [`File`] backing this buffer. This should be called when
1336 /// the file has changed or has been deleted.
1337 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1338 let was_dirty = self.is_dirty();
1339 let mut file_changed = false;
1340
1341 if let Some(old_file) = self.file.as_ref() {
1342 if new_file.path() != old_file.path() {
1343 file_changed = true;
1344 }
1345
1346 let old_state = old_file.disk_state();
1347 let new_state = new_file.disk_state();
1348 if old_state != new_state {
1349 file_changed = true;
1350 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1351 cx.emit(BufferEvent::ReloadNeeded)
1352 }
1353 }
1354 } else {
1355 file_changed = true;
1356 };
1357
1358 self.file = Some(new_file);
1359 if file_changed {
1360 self.was_changed();
1361 self.non_text_state_update_count += 1;
1362 if was_dirty != self.is_dirty() {
1363 cx.emit(BufferEvent::DirtyChanged);
1364 }
1365 cx.emit(BufferEvent::FileHandleChanged);
1366 cx.notify();
1367 }
1368 }
1369
    /// Returns the base buffer, if this buffer is a branch.
    pub fn base_buffer(&self) -> Option<Entity<Self>> {
1371 Some(self.branch_state.as_ref()?.base_buffer.clone())
1372 }
1373
1374 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1375 pub fn language(&self) -> Option<&Arc<Language>> {
1376 self.language.as_ref()
1377 }
1378
1379 /// Returns the [`Language`] at the given location.
1380 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1381 let offset = position.to_offset(self);
1382 let mut is_first = true;
1383 let start_anchor = self.anchor_before(offset);
1384 let end_anchor = self.anchor_after(offset);
1385 self.syntax_map
1386 .lock()
1387 .layers_for_range(offset..offset, &self.text, false)
1388 .filter(|layer| {
1389 if is_first {
1390 is_first = false;
1391 return true;
1392 }
1393
1394 layer
1395 .included_sub_ranges
1396 .map(|sub_ranges| {
1397 sub_ranges.iter().any(|sub_range| {
1398 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1399 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1400 !is_before_start && !is_after_end
1401 })
1402 })
1403 .unwrap_or(true)
1404 })
1405 .last()
1406 .map(|info| info.language.clone())
1407 .or_else(|| self.language.clone())
1408 }
1409
1410 /// Returns each [`Language`] for the active syntax layers at the given location.
1411 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1412 let offset = position.to_offset(self);
1413 let mut languages: Vec<Arc<Language>> = self
1414 .syntax_map
1415 .lock()
1416 .layers_for_range(offset..offset, &self.text, false)
1417 .map(|info| info.language.clone())
1418 .collect();
1419
1420 if languages.is_empty()
1421 && let Some(buffer_language) = self.language()
1422 {
1423 languages.push(buffer_language.clone());
1424 }
1425
1426 languages
1427 }
1428
1429 /// An integer version number that accounts for all updates besides
1430 /// the buffer's text itself (which is versioned via a version vector).
1431 pub fn non_text_state_update_count(&self) -> usize {
1432 self.non_text_state_update_count
1433 }
1434
1435 /// Whether the buffer is being parsed in the background.
1436 #[cfg(any(test, feature = "test-support"))]
1437 pub fn is_parsing(&self) -> bool {
1438 self.reparse.is_some()
1439 }
1440
1441 /// Indicates whether the buffer contains any regions that may be
1442 /// written in a language that hasn't been loaded yet.
1443 pub fn contains_unknown_injections(&self) -> bool {
1444 self.syntax_map.lock().contains_unknown_injections()
1445 }
1446
1447 #[cfg(any(test, feature = "test-support"))]
1448 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1449 self.sync_parse_timeout = timeout;
1450 }
1451
1452 /// Called after an edit to synchronize the buffer's main parse tree with
1453 /// the buffer's new underlying state.
1454 ///
1455 /// Locks the syntax map and interpolates the edits since the last reparse
1456 /// into the foreground syntax tree.
1457 ///
1458 /// Then takes a stable snapshot of the syntax map before unlocking it.
1459 /// The snapshot with the interpolated edits is sent to a background thread,
1460 /// where we ask Tree-sitter to perform an incremental parse.
1461 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the new tree.
    ///
    /// If the timeout elapses first, we return with the interpolated tree still
    /// in the foreground and spawn a second task that waits for the background
    /// parse to complete. When it does, that task calls back into the main
    /// thread and assigns the newly parsed state.
1470 ///
1471 /// If the buffer or grammar changed since the start of the background parse,
1472 /// initiate an additional reparse recursively. To avoid concurrent parses
1473 /// for the same buffer, we only initiate a new parse if we are not already
1474 /// parsing in the background.
1475 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1476 if self.reparse.is_some() {
1477 return;
1478 }
1479 let language = if let Some(language) = self.language.clone() {
1480 language
1481 } else {
1482 return;
1483 };
1484
1485 let text = self.text_snapshot();
1486 let parsed_version = self.version();
1487
1488 let mut syntax_map = self.syntax_map.lock();
1489 syntax_map.interpolate(&text);
1490 let language_registry = syntax_map.language_registry();
1491 let mut syntax_snapshot = syntax_map.snapshot();
1492 drop(syntax_map);
1493
1494 let parse_task = cx.background_spawn({
1495 let language = language.clone();
1496 let language_registry = language_registry.clone();
1497 async move {
1498 syntax_snapshot.reparse(&text, language_registry, language);
1499 syntax_snapshot
1500 }
1501 });
1502
1503 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1504 match cx
1505 .background_executor()
1506 .block_with_timeout(self.sync_parse_timeout, parse_task)
1507 {
1508 Ok(new_syntax_snapshot) => {
1509 self.did_finish_parsing(new_syntax_snapshot, cx);
1510 self.reparse = None;
1511 }
1512 Err(parse_task) => {
1513 self.reparse = Some(cx.spawn(async move |this, cx| {
1514 let new_syntax_map = parse_task.await;
1515 this.update(cx, move |this, cx| {
1516 let grammar_changed =
1517 this.language.as_ref().is_none_or(|current_language| {
1518 !Arc::ptr_eq(&language, current_language)
1519 });
1520 let language_registry_changed = new_syntax_map
1521 .contains_unknown_injections()
1522 && language_registry.is_some_and(|registry| {
1523 registry.version() != new_syntax_map.language_registry_version()
1524 });
1525 let parse_again = language_registry_changed
1526 || grammar_changed
1527 || this.version.changed_since(&parsed_version);
1528 this.did_finish_parsing(new_syntax_map, cx);
1529 this.reparse = None;
1530 if parse_again {
1531 this.reparse(cx);
1532 }
1533 })
1534 .ok();
1535 }));
1536 }
1537 }
1538 }
1539
1540 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1541 self.was_changed();
1542 self.non_text_state_update_count += 1;
1543 self.syntax_map.lock().did_parse(syntax_snapshot);
1544 self.request_autoindent(cx);
1545 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1546 cx.emit(BufferEvent::Reparsed);
1547 cx.notify();
1548 }
1549
1550 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1551 self.parse_status.1.clone()
1552 }
1553
1554 /// Assign to the buffer a set of diagnostics created by a given language server.
1555 pub fn update_diagnostics(
1556 &mut self,
1557 server_id: LanguageServerId,
1558 diagnostics: DiagnosticSet,
1559 cx: &mut Context<Self>,
1560 ) {
1561 let lamport_timestamp = self.text.lamport_clock.tick();
1562 let op = Operation::UpdateDiagnostics {
1563 server_id,
1564 diagnostics: diagnostics.iter().cloned().collect(),
1565 lamport_timestamp,
1566 };
1567
1568 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1569 self.send_operation(op, true, cx);
1570 }
1571
1572 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1573 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1574 return None;
1575 };
1576 Some(&self.diagnostics[idx].1)
1577 }
1578
1579 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1580 if let Some(indent_sizes) = self.compute_autoindents() {
1581 let indent_sizes = cx.background_spawn(indent_sizes);
1582 match cx
1583 .background_executor()
1584 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1585 {
1586 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1587 Err(indent_sizes) => {
1588 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1589 let indent_sizes = indent_sizes.await;
1590 this.update(cx, |this, cx| {
1591 this.apply_autoindents(indent_sizes, cx);
1592 })
1593 .ok();
1594 }));
1595 }
1596 }
1597 } else {
1598 self.autoindent_requests.clear();
1599 for tx in self.wait_for_autoindent_txs.drain(..) {
1600 tx.send(()).ok();
1601 }
1602 }
1603 }
1604
1605 fn compute_autoindents(
1606 &self,
1607 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1608 let max_rows_between_yields = 100;
1609 let snapshot = self.snapshot();
1610 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1611 return None;
1612 }
1613
1614 let autoindent_requests = self.autoindent_requests.clone();
1615 Some(async move {
1616 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1617 for request in autoindent_requests {
1618 // Resolve each edited range to its row in the current buffer and in the
1619 // buffer before this batch of edits.
1620 let mut row_ranges = Vec::new();
1621 let mut old_to_new_rows = BTreeMap::new();
1622 let mut language_indent_sizes_by_new_row = Vec::new();
1623 for entry in &request.entries {
1624 let position = entry.range.start;
1625 let new_row = position.to_point(&snapshot).row;
1626 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1627 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1628
1629 if !entry.first_line_is_new {
1630 let old_row = position.to_point(&request.before_edit).row;
1631 old_to_new_rows.insert(old_row, new_row);
1632 }
1633 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1634 }
1635
1636 // Build a map containing the suggested indentation for each of the edited lines
1637 // with respect to the state of the buffer before these edits. This map is keyed
1638 // by the rows for these lines in the current state of the buffer.
1639 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1640 let old_edited_ranges =
1641 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1642 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1643 let mut language_indent_size = IndentSize::default();
1644 for old_edited_range in old_edited_ranges {
1645 let suggestions = request
1646 .before_edit
1647 .suggest_autoindents(old_edited_range.clone())
1648 .into_iter()
1649 .flatten();
1650 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1651 if let Some(suggestion) = suggestion {
1652 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1653
1654 // Find the indent size based on the language for this row.
1655 while let Some((row, size)) = language_indent_sizes.peek() {
1656 if *row > new_row {
1657 break;
1658 }
1659 language_indent_size = *size;
1660 language_indent_sizes.next();
1661 }
1662
1663 let suggested_indent = old_to_new_rows
1664 .get(&suggestion.basis_row)
1665 .and_then(|from_row| {
1666 Some(old_suggestions.get(from_row).copied()?.0)
1667 })
1668 .unwrap_or_else(|| {
1669 request
1670 .before_edit
1671 .indent_size_for_line(suggestion.basis_row)
1672 })
1673 .with_delta(suggestion.delta, language_indent_size);
1674 old_suggestions
1675 .insert(new_row, (suggested_indent, suggestion.within_error));
1676 }
1677 }
1678 yield_now().await;
1679 }
1680
1681 // Compute new suggestions for each line, but only include them in the result
1682 // if they differ from the old suggestion for that line.
1683 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1684 let mut language_indent_size = IndentSize::default();
1685 for (row_range, original_indent_column) in row_ranges {
1686 let new_edited_row_range = if request.is_block_mode {
1687 row_range.start..row_range.start + 1
1688 } else {
1689 row_range.clone()
1690 };
1691
1692 let suggestions = snapshot
1693 .suggest_autoindents(new_edited_row_range.clone())
1694 .into_iter()
1695 .flatten();
1696 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1697 if let Some(suggestion) = suggestion {
1698 // Find the indent size based on the language for this row.
1699 while let Some((row, size)) = language_indent_sizes.peek() {
1700 if *row > new_row {
1701 break;
1702 }
1703 language_indent_size = *size;
1704 language_indent_sizes.next();
1705 }
1706
1707 let suggested_indent = indent_sizes
1708 .get(&suggestion.basis_row)
1709 .copied()
1710 .map(|e| e.0)
1711 .unwrap_or_else(|| {
1712 snapshot.indent_size_for_line(suggestion.basis_row)
1713 })
1714 .with_delta(suggestion.delta, language_indent_size);
1715
1716 if old_suggestions.get(&new_row).is_none_or(
1717 |(old_indentation, was_within_error)| {
1718 suggested_indent != *old_indentation
1719 && (!suggestion.within_error || *was_within_error)
1720 },
1721 ) {
1722 indent_sizes.insert(
1723 new_row,
1724 (suggested_indent, request.ignore_empty_lines),
1725 );
1726 }
1727 }
1728 }
1729
1730 if let (true, Some(original_indent_column)) =
1731 (request.is_block_mode, original_indent_column)
1732 {
1733 let new_indent =
1734 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1735 *indent
1736 } else {
1737 snapshot.indent_size_for_line(row_range.start)
1738 };
1739 let delta = new_indent.len as i64 - original_indent_column as i64;
1740 if delta != 0 {
1741 for row in row_range.skip(1) {
1742 indent_sizes.entry(row).or_insert_with(|| {
1743 let mut size = snapshot.indent_size_for_line(row);
1744 if size.kind == new_indent.kind {
1745 match delta.cmp(&0) {
1746 Ordering::Greater => size.len += delta as u32,
1747 Ordering::Less => {
1748 size.len = size.len.saturating_sub(-delta as u32)
1749 }
1750 Ordering::Equal => {}
1751 }
1752 }
1753 (size, request.ignore_empty_lines)
1754 });
1755 }
1756 }
1757 }
1758
1759 yield_now().await;
1760 }
1761 }
1762
1763 indent_sizes
1764 .into_iter()
1765 .filter_map(|(row, (indent, ignore_empty_lines))| {
1766 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1767 None
1768 } else {
1769 Some((row, indent))
1770 }
1771 })
1772 .collect()
1773 })
1774 }
1775
1776 fn apply_autoindents(
1777 &mut self,
1778 indent_sizes: BTreeMap<u32, IndentSize>,
1779 cx: &mut Context<Self>,
1780 ) {
1781 self.autoindent_requests.clear();
1782 for tx in self.wait_for_autoindent_txs.drain(..) {
1783 tx.send(()).ok();
1784 }
1785
1786 let edits: Vec<_> = indent_sizes
1787 .into_iter()
1788 .filter_map(|(row, indent_size)| {
1789 let current_size = indent_size_for_line(self, row);
1790 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1791 })
1792 .collect();
1793
1794 let preserve_preview = self.preserve_preview();
1795 self.edit(edits, None, cx);
1796 if preserve_preview {
1797 self.refresh_preview();
1798 }
1799 }
1800
1801 /// Create a minimal edit that will cause the given row to be indented
1802 /// with the given size. After applying this edit, the length of the line
1803 /// will always be at least `new_size.len`.
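    ///
    /// # Example
    ///
    /// A minimal sketch, assuming space-based indentation on both sides: widening a
    /// 2-space indent to 4 spaces produces an insertion of two spaces at the start
    /// of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
    /// );
    /// ```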
1804 pub fn edit_for_indent_size_adjustment(
1805 row: u32,
1806 current_size: IndentSize,
1807 new_size: IndentSize,
1808 ) -> Option<(Range<Point>, String)> {
1809 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1811 Ordering::Greater => {
1812 let point = Point::new(row, 0);
1813 Some((
1814 point..point,
1815 iter::repeat(new_size.char())
1816 .take((new_size.len - current_size.len) as usize)
1817 .collect::<String>(),
1818 ))
1819 }
1820
1821 Ordering::Less => Some((
1822 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1823 String::new(),
1824 )),
1825
1826 Ordering::Equal => None,
1827 }
1828 } else {
1829 Some((
1830 Point::new(row, 0)..Point::new(row, current_size.len),
1831 iter::repeat(new_size.char())
1832 .take(new_size.len as usize)
1833 .collect::<String>(),
1834 ))
1835 }
1836 }
1837
1838 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1839 /// and the given new text.
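    ///
    /// # Example
    ///
    /// A hedged sketch of the usual flow, assuming an `Entity<Buffer>` named `buffer`
    /// and access to an async context: compute the diff on the background executor,
    /// then apply it back to the buffer with [`Buffer::apply_diff`].
    ///
    /// ```ignore
    /// let diff_task = buffer.update(cx, |buffer, cx| {
    ///     buffer.diff("new contents\n".to_string(), cx)
    /// });
    /// // Later, in an async context:
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```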
1840 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1841 let old_text = self.as_rope().clone();
1842 let base_version = self.version();
1843 cx.background_executor()
1844 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1845 let old_text = old_text.to_string();
1846 let line_ending = LineEnding::detect(&new_text);
1847 LineEnding::normalize(&mut new_text);
1848 let edits = text_diff(&old_text, &new_text);
1849 Diff {
1850 base_version,
1851 line_ending,
1852 edits,
1853 }
1854 })
1855 }
1856
1857 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1859 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1860 let old_text = self.as_rope().clone();
1861 let line_ending = self.line_ending();
1862 let base_version = self.version();
1863 cx.background_spawn(async move {
1864 let ranges = trailing_whitespace_ranges(&old_text);
1865 let empty = Arc::<str>::from("");
1866 Diff {
1867 base_version,
1868 line_ending,
1869 edits: ranges
1870 .into_iter()
1871 .map(|range| (range, empty.clone()))
1872 .collect(),
1873 }
1874 })
1875 }
1876
1877 /// Ensures that the buffer ends with a single newline character, and
1878 /// no other whitespace. Skips if the buffer is empty.
1879 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1880 let len = self.len();
1881 if len == 0 {
1882 return;
1883 }
1884 let mut offset = len;
1885 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1886 let non_whitespace_len = chunk
1887 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1888 .len();
1889 offset -= chunk.len();
1890 offset += non_whitespace_len;
1891 if non_whitespace_len != 0 {
1892 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1893 return;
1894 }
1895 break;
1896 }
1897 }
1898 self.edit([(offset..len, "\n")], None, cx);
1899 }
1900
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1904 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1905 let snapshot = self.snapshot();
1906 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1907 let mut delta = 0;
1908 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1909 while let Some(edit_since) = edits_since.peek() {
1910 // If the edit occurs after a diff hunk, then it does not
1911 // affect that hunk.
1912 if edit_since.old.start > range.end {
1913 break;
1914 }
1915 // If the edit precedes the diff hunk, then adjust the hunk
1916 // to reflect the edit.
1917 else if edit_since.old.end < range.start {
1918 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1919 edits_since.next();
1920 }
1921 // If the edit intersects a diff hunk, then discard that hunk.
1922 else {
1923 return None;
1924 }
1925 }
1926
1927 let start = (range.start as i64 + delta) as usize;
1928 let end = (range.end as i64 + delta) as usize;
1929 Some((start..end, new_text))
1930 });
1931
1932 self.start_transaction();
1933 self.text.set_line_ending(diff.line_ending);
1934 self.edit(adjusted_edits, None, cx);
1935 self.end_transaction(cx)
1936 }
1937
1938 fn has_unsaved_edits(&self) -> bool {
1939 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1940
1941 if last_version == self.version {
1942 self.has_unsaved_edits
1943 .set((last_version, has_unsaved_edits));
1944 return has_unsaved_edits;
1945 }
1946
1947 let has_edits = self.has_edits_since(&self.saved_version);
1948 self.has_unsaved_edits
1949 .set((self.version.clone(), has_edits));
1950 has_edits
1951 }
1952
1953 /// Checks if the buffer has unsaved changes.
1954 pub fn is_dirty(&self) -> bool {
1955 if self.capability == Capability::ReadOnly {
1956 return false;
1957 }
1958 if self.has_conflict {
1959 return true;
1960 }
1961 match self.file.as_ref().map(|f| f.disk_state()) {
1962 Some(DiskState::New) | Some(DiskState::Deleted) => {
1963 !self.is_empty() && self.has_unsaved_edits()
1964 }
1965 _ => self.has_unsaved_edits(),
1966 }
1967 }
1968
1969 /// Checks if the buffer and its file have both changed since the buffer
1970 /// was last saved or reloaded.
1971 pub fn has_conflict(&self) -> bool {
1972 if self.has_conflict {
1973 return true;
1974 }
1975 let Some(file) = self.file.as_ref() else {
1976 return false;
1977 };
1978 match file.disk_state() {
1979 DiskState::New => false,
1980 DiskState::Present { mtime } => match self.saved_mtime {
1981 Some(saved_mtime) => {
1982 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1983 }
1984 None => true,
1985 },
1986 DiskState::Deleted => false,
1987 }
1988 }
1989
1990 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1991 pub fn subscribe(&mut self) -> Subscription {
1992 self.text.subscribe()
1993 }
1994
1995 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1996 ///
1997 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
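    ///
    /// # Example
    ///
    /// A hedged sketch: register a weak flag, then poll it after performing edits.
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// // ...after some edits...
    /// if changed.replace(false) {
    ///     // The buffer's text changed since the flag was last cleared.
    /// }
    /// ```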
1999 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2000 if let Err(ix) = self
2001 .change_bits
2002 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2003 {
2004 self.change_bits.insert(ix, bit);
2005 }
2006 }
2007
2008 fn was_changed(&mut self) {
2009 self.change_bits.retain(|change_bit| {
2010 change_bit.upgrade().is_some_and(|bit| {
2011 bit.replace(true);
2012 true
2013 })
2014 });
2015 }
2016
2017 /// Starts a transaction, if one is not already in-progress. When undoing or
2018 /// redoing edits, all of the edits performed within a transaction are undone
2019 /// or redone together.
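    ///
    /// # Example
    ///
    /// A sketch of grouping two edits so they undo together:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// let len = buffer.len();
    /// buffer.edit([(len..len, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// // Reverts both edits at once.
    /// buffer.undo(cx);
    /// ```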
2020 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2021 self.start_transaction_at(Instant::now())
2022 }
2023
2024 /// Starts a transaction, providing the current time. Subsequent transactions
2025 /// that occur within a short period of time will be grouped together. This
2026 /// is controlled by the buffer's undo grouping duration.
2027 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2028 self.transaction_depth += 1;
2029 if self.was_dirty_before_starting_transaction.is_none() {
2030 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2031 }
2032 self.text.start_transaction_at(now)
2033 }
2034
2035 /// Terminates the current transaction, if this is the outermost transaction.
2036 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2037 self.end_transaction_at(Instant::now(), cx)
2038 }
2039
2040 /// Terminates the current transaction, providing the current time. Subsequent transactions
2041 /// that occur within a short period of time will be grouped together. This
2042 /// is controlled by the buffer's undo grouping duration.
2043 pub fn end_transaction_at(
2044 &mut self,
2045 now: Instant,
2046 cx: &mut Context<Self>,
2047 ) -> Option<TransactionId> {
2048 assert!(self.transaction_depth > 0);
2049 self.transaction_depth -= 1;
2050 let was_dirty = if self.transaction_depth == 0 {
2051 self.was_dirty_before_starting_transaction.take().unwrap()
2052 } else {
2053 false
2054 };
2055 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2056 self.did_edit(&start_version, was_dirty, cx);
2057 Some(transaction_id)
2058 } else {
2059 None
2060 }
2061 }
2062
2063 /// Manually add a transaction to the buffer's undo history.
2064 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2065 self.text.push_transaction(transaction, now);
2066 }
2067
2068 /// Differs from `push_transaction` in that it does not clear the redo
2069 /// stack. Intended to be used to create a parent transaction to merge
2070 /// potential child transactions into.
2071 ///
2072 /// The caller is responsible for removing it from the undo history using
2073 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2074 /// are merged into this transaction, the caller is responsible for ensuring
2075 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2076 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
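    ///
    /// # Example
    ///
    /// A hedged sketch of the pattern described above:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    ///
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // Merge the child edits into the parent transaction.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged; drop the placeholder from the undo history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```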
2079 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2080 self.text.push_empty_transaction(now)
2081 }
2082
2083 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2085 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2086 self.text.finalize_last_transaction()
2087 }
2088
2089 /// Manually group all changes since a given transaction.
2090 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2091 self.text.group_until_transaction(transaction_id);
2092 }
2093
    /// Manually remove a transaction from the buffer's undo history.
2095 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2096 self.text.forget_transaction(transaction_id)
2097 }
2098
    /// Retrieve a transaction from the buffer's undo history.
2100 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2101 self.text.get_transaction(transaction_id)
2102 }
2103
2104 /// Manually merge two transactions in the buffer's undo history.
2105 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2106 self.text.merge_transactions(transaction, destination);
2107 }
2108
2109 /// Waits for the buffer to receive operations with the given timestamps.
2110 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2111 &mut self,
2112 edit_ids: It,
2113 ) -> impl Future<Output = Result<()>> + use<It> {
2114 self.text.wait_for_edits(edit_ids)
2115 }
2116
2117 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2118 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2119 &mut self,
2120 anchors: It,
2121 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2122 self.text.wait_for_anchors(anchors)
2123 }
2124
2125 /// Waits for the buffer to receive operations up to the given version.
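    ///
    /// A hedged usage sketch, assuming a `version: clock::Global` received from a collaborator:
    ///
    /// ```ignore
    /// let wait = buffer.wait_for_version(version.clone());
    /// wait.await?;
    /// // The buffer has now observed every operation up to `version`.
    /// ```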
2126 pub fn wait_for_version(
2127 &mut self,
2128 version: clock::Global,
2129 ) -> impl Future<Output = Result<()>> + use<> {
2130 self.text.wait_for_version(version)
2131 }
2132
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2135 pub fn give_up_waiting(&mut self) {
2136 self.text.give_up_waiting();
2137 }
2138
2139 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2140 let mut rx = None;
2141 if !self.autoindent_requests.is_empty() {
2142 let channel = oneshot::channel();
2143 self.wait_for_autoindent_txs.push(channel.0);
2144 rx = Some(channel.1);
2145 }
2146 rx
2147 }
2148
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2150 pub fn set_active_selections(
2151 &mut self,
2152 selections: Arc<[Selection<Anchor>]>,
2153 line_mode: bool,
2154 cursor_shape: CursorShape,
2155 cx: &mut Context<Self>,
2156 ) {
2157 let lamport_timestamp = self.text.lamport_clock.tick();
2158 self.remote_selections.insert(
2159 self.text.replica_id(),
2160 SelectionSet {
2161 selections: selections.clone(),
2162 lamport_timestamp,
2163 line_mode,
2164 cursor_shape,
2165 },
2166 );
2167 self.send_operation(
2168 Operation::UpdateSelections {
2169 selections,
2170 line_mode,
2171 lamport_timestamp,
2172 cursor_shape,
2173 },
2174 true,
2175 cx,
2176 );
2177 self.non_text_state_update_count += 1;
2178 cx.notify();
2179 }
2180
2181 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2182 /// this replica.
2183 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2184 if self
2185 .remote_selections
2186 .get(&self.text.replica_id())
2187 .is_none_or(|set| !set.selections.is_empty())
2188 {
2189 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2190 }
2191 }
2192
2193 pub fn set_agent_selections(
2194 &mut self,
2195 selections: Arc<[Selection<Anchor>]>,
2196 line_mode: bool,
2197 cursor_shape: CursorShape,
2198 cx: &mut Context<Self>,
2199 ) {
2200 let lamport_timestamp = self.text.lamport_clock.tick();
2201 self.remote_selections.insert(
2202 AGENT_REPLICA_ID,
2203 SelectionSet {
2204 selections,
2205 lamport_timestamp,
2206 line_mode,
2207 cursor_shape,
2208 },
2209 );
2210 self.non_text_state_update_count += 1;
2211 cx.notify();
2212 }
2213
2214 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2215 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2216 }
2217
2218 /// Replaces the buffer's entire text.
2219 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2220 where
2221 T: Into<Arc<str>>,
2222 {
2223 self.autoindent_requests.clear();
2224 self.edit([(0..self.len(), text)], None, cx)
2225 }
2226
2227 /// Appends the given text to the end of the buffer.
2228 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2229 where
2230 T: Into<Arc<str>>,
2231 {
2232 self.edit([(self.len()..self.len(), text)], None, cx)
2233 }
2234
2235 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2236 /// delete, and a string of text to insert at that location.
2237 ///
2238 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2239 /// request for the edited ranges, which will be processed when the buffer finishes
2240 /// parsing.
2241 ///
2242 /// Parsing takes place at the end of a transaction, and may compute synchronously
2243 /// or asynchronously, depending on the changes.
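    ///
    /// # Example
    ///
    /// A sketch of a multi-range edit that auto-indents each edited line:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, ";")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```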
2244 pub fn edit<I, S, T>(
2245 &mut self,
2246 edits_iter: I,
2247 autoindent_mode: Option<AutoindentMode>,
2248 cx: &mut Context<Self>,
2249 ) -> Option<clock::Lamport>
2250 where
2251 I: IntoIterator<Item = (Range<S>, T)>,
2252 S: ToOffset,
2253 T: Into<Arc<str>>,
2254 {
2255 // Skip invalid edits and coalesce contiguous ones.
2256 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2257
2258 for (range, new_text) in edits_iter {
2259 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2260
2261 if range.start > range.end {
2262 mem::swap(&mut range.start, &mut range.end);
2263 }
2264 let new_text = new_text.into();
2265 if !new_text.is_empty() || !range.is_empty() {
2266 if let Some((prev_range, prev_text)) = edits.last_mut()
2267 && prev_range.end >= range.start
2268 {
2269 prev_range.end = cmp::max(prev_range.end, range.end);
2270 *prev_text = format!("{prev_text}{new_text}").into();
2271 } else {
2272 edits.push((range, new_text));
2273 }
2274 }
2275 }
2276 if edits.is_empty() {
2277 return None;
2278 }
2279
2280 self.start_transaction();
2281 self.pending_autoindent.take();
2282 let autoindent_request = autoindent_mode
2283 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2284
2285 let edit_operation = self.text.edit(edits.iter().cloned());
2286 let edit_id = edit_operation.timestamp();
2287
2288 if let Some((before_edit, mode)) = autoindent_request {
2289 let mut delta = 0isize;
2290 let mut previous_setting = None;
2291 let entries: Vec<_> = edits
2292 .into_iter()
2293 .enumerate()
2294 .zip(&edit_operation.as_edit().unwrap().new_text)
2295 .filter(|((_, (range, _)), _)| {
2296 let language = before_edit.language_at(range.start);
2297 let language_id = language.map(|l| l.id());
2298 if let Some((cached_language_id, auto_indent)) = previous_setting
2299 && cached_language_id == language_id
2300 {
2301 auto_indent
2302 } else {
2303 // The auto-indent setting is not present in editorconfigs, hence
2304 // we can avoid passing the file here.
2305 let auto_indent =
2306 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2307 previous_setting = Some((language_id, auto_indent));
2308 auto_indent
2309 }
2310 })
2311 .map(|((ix, (range, _)), new_text)| {
2312 let new_text_length = new_text.len();
2313 let old_start = range.start.to_point(&before_edit);
2314 let new_start = (delta + range.start as isize) as usize;
2315 let range_len = range.end - range.start;
2316 delta += new_text_length as isize - range_len as isize;
2317
2318 // Decide what range of the insertion to auto-indent, and whether
2319 // the first line of the insertion should be considered a newly-inserted line
2320 // or an edit to an existing line.
2321 let mut range_of_insertion_to_indent = 0..new_text_length;
2322 let mut first_line_is_new = true;
2323
2324 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2325 let old_line_end = before_edit.line_len(old_start.row);
2326
2327 if old_start.column > old_line_start {
2328 first_line_is_new = false;
2329 }
2330
2331 if !new_text.contains('\n')
2332 && (old_start.column + (range_len as u32) < old_line_end
2333 || old_line_end == old_line_start)
2334 {
2335 first_line_is_new = false;
2336 }
2337
2338 // When inserting text starting with a newline, avoid auto-indenting the
2339 // previous line.
2340 if new_text.starts_with('\n') {
2341 range_of_insertion_to_indent.start += 1;
2342 first_line_is_new = true;
2343 }
2344
2345 let mut original_indent_column = None;
2346 if let AutoindentMode::Block {
2347 original_indent_columns,
2348 } = &mode
2349 {
2350 original_indent_column = Some(if new_text.starts_with('\n') {
2351 indent_size_for_text(
2352 new_text[range_of_insertion_to_indent.clone()].chars(),
2353 )
2354 .len
2355 } else {
2356 original_indent_columns
2357 .get(ix)
2358 .copied()
2359 .flatten()
2360 .unwrap_or_else(|| {
2361 indent_size_for_text(
2362 new_text[range_of_insertion_to_indent.clone()].chars(),
2363 )
2364 .len
2365 })
2366 });
2367
2368 // Avoid auto-indenting the line after the edit.
2369 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2370 range_of_insertion_to_indent.end -= 1;
2371 }
2372 }
2373
2374 AutoindentRequestEntry {
2375 first_line_is_new,
2376 original_indent_column,
2377 indent_size: before_edit.language_indent_size_at(range.start, cx),
2378 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2379 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2380 }
2381 })
2382 .collect();
2383
2384 if !entries.is_empty() {
2385 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2386 before_edit,
2387 entries,
2388 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2389 ignore_empty_lines: false,
2390 }));
2391 }
2392 }
2393
2394 self.end_transaction(cx);
2395 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2396 Some(edit_id)
2397 }
2398
2399 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2400 self.was_changed();
2401
2402 if self.edits_since::<usize>(old_version).next().is_none() {
2403 return;
2404 }
2405
2406 self.reparse(cx);
2407 cx.emit(BufferEvent::Edited);
2408 if was_dirty != self.is_dirty() {
2409 cx.emit(BufferEvent::DirtyChanged);
2410 }
2411 cx.notify();
2412 }
2413
2414 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2415 where
2416 I: IntoIterator<Item = Range<T>>,
2417 T: ToOffset + Copy,
2418 {
2419 let before_edit = self.snapshot();
2420 let entries = ranges
2421 .into_iter()
2422 .map(|range| AutoindentRequestEntry {
2423 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2424 first_line_is_new: true,
2425 indent_size: before_edit.language_indent_size_at(range.start, cx),
2426 original_indent_column: None,
2427 })
2428 .collect();
2429 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2430 before_edit,
2431 entries,
2432 is_block_mode: false,
2433 ignore_empty_lines: true,
2434 }));
2435 self.request_autoindent(cx);
2436 }
2437
2438 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2439 // You can also request the insertion of empty lines above and below the line starting at the returned point.
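    //
    // A hedged sketch (assumes a `Point` position and a `cx: &mut Context<Buffer>`):
    //
    //     let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    //     // `start` is the beginning of the newly inserted blank line.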
2440 pub fn insert_empty_line(
2441 &mut self,
2442 position: impl ToPoint,
2443 space_above: bool,
2444 space_below: bool,
2445 cx: &mut Context<Self>,
2446 ) -> Point {
2447 let mut position = position.to_point(self);
2448
2449 self.start_transaction();
2450
2451 self.edit(
2452 [(position..position, "\n")],
2453 Some(AutoindentMode::EachLine),
2454 cx,
2455 );
2456
2457 if position.column > 0 {
2458 position += Point::new(1, 0);
2459 }
2460
2461 if !self.is_line_blank(position.row) {
2462 self.edit(
2463 [(position..position, "\n")],
2464 Some(AutoindentMode::EachLine),
2465 cx,
2466 );
2467 }
2468
2469 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2470 self.edit(
2471 [(position..position, "\n")],
2472 Some(AutoindentMode::EachLine),
2473 cx,
2474 );
2475 position.row += 1;
2476 }
2477
2478 if space_below
2479 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2480 {
2481 self.edit(
2482 [(position..position, "\n")],
2483 Some(AutoindentMode::EachLine),
2484 cx,
2485 );
2486 }
2487
2488 self.end_transaction(cx);
2489
2490 position
2491 }
2492
2493 /// Applies the given remote operations to the buffer.
2494 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2495 self.pending_autoindent.take();
2496 let was_dirty = self.is_dirty();
2497 let old_version = self.version.clone();
2498 let mut deferred_ops = Vec::new();
2499 let buffer_ops = ops
2500 .into_iter()
2501 .filter_map(|op| match op {
2502 Operation::Buffer(op) => Some(op),
2503 _ => {
2504 if self.can_apply_op(&op) {
2505 self.apply_op(op, cx);
2506 } else {
2507 deferred_ops.push(op);
2508 }
2509 None
2510 }
2511 })
2512 .collect::<Vec<_>>();
2513 for operation in buffer_ops.iter() {
2514 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2515 }
2516 self.text.apply_ops(buffer_ops);
2517 self.deferred_ops.insert(deferred_ops);
2518 self.flush_deferred_ops(cx);
2519 self.did_edit(&old_version, was_dirty, cx);
2520 // Notify independently of whether the buffer was edited as the operations could include a
2521 // selection update.
2522 cx.notify();
2523 }
2524
2525 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2526 let mut deferred_ops = Vec::new();
2527 for op in self.deferred_ops.drain().iter().cloned() {
2528 if self.can_apply_op(&op) {
2529 self.apply_op(op, cx);
2530 } else {
2531 deferred_ops.push(op);
2532 }
2533 }
2534 self.deferred_ops.insert(deferred_ops);
2535 }
2536
2537 pub fn has_deferred_ops(&self) -> bool {
2538 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2539 }
2540
2541 fn can_apply_op(&self, operation: &Operation) -> bool {
2542 match operation {
2543 Operation::Buffer(_) => {
2544 unreachable!("buffer operations should never be applied at this layer")
2545 }
2546 Operation::UpdateDiagnostics {
2547 diagnostics: diagnostic_set,
2548 ..
2549 } => diagnostic_set.iter().all(|diagnostic| {
2550 self.text.can_resolve(&diagnostic.range.start)
2551 && self.text.can_resolve(&diagnostic.range.end)
2552 }),
2553 Operation::UpdateSelections { selections, .. } => selections
2554 .iter()
2555 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2556 Operation::UpdateCompletionTriggers { .. } => true,
2557 }
2558 }
2559
2560 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2561 match operation {
2562 Operation::Buffer(_) => {
2563 unreachable!("buffer operations should never be applied at this layer")
2564 }
2565 Operation::UpdateDiagnostics {
2566 server_id,
2567 diagnostics: diagnostic_set,
2568 lamport_timestamp,
2569 } => {
2570 let snapshot = self.snapshot();
2571 self.apply_diagnostic_update(
2572 server_id,
2573 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2574 lamport_timestamp,
2575 cx,
2576 );
2577 }
2578 Operation::UpdateSelections {
2579 selections,
2580 lamport_timestamp,
2581 line_mode,
2582 cursor_shape,
2583 } => {
2584 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2585 && set.lamport_timestamp > lamport_timestamp
2586 {
2587 return;
2588 }
2589
2590 self.remote_selections.insert(
2591 lamport_timestamp.replica_id,
2592 SelectionSet {
2593 selections,
2594 lamport_timestamp,
2595 line_mode,
2596 cursor_shape,
2597 },
2598 );
2599 self.text.lamport_clock.observe(lamport_timestamp);
2600 self.non_text_state_update_count += 1;
2601 }
2602 Operation::UpdateCompletionTriggers {
2603 triggers,
2604 lamport_timestamp,
2605 server_id,
2606 } => {
2607 if triggers.is_empty() {
2608 self.completion_triggers_per_language_server
2609 .remove(&server_id);
2610 self.completion_triggers = self
2611 .completion_triggers_per_language_server
2612 .values()
2613 .flat_map(|triggers| triggers.iter().cloned())
2614 .collect();
2615 } else {
2616 self.completion_triggers_per_language_server
2617 .insert(server_id, triggers.iter().cloned().collect());
2618 self.completion_triggers.extend(triggers);
2619 }
2620 self.text.lamport_clock.observe(lamport_timestamp);
2621 }
2622 }
2623 }
2624
2625 fn apply_diagnostic_update(
2626 &mut self,
2627 server_id: LanguageServerId,
2628 diagnostics: DiagnosticSet,
2629 lamport_timestamp: clock::Lamport,
2630 cx: &mut Context<Self>,
2631 ) {
2632 if lamport_timestamp > self.diagnostics_timestamp {
2633 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2634 if diagnostics.is_empty() {
2635 if let Ok(ix) = ix {
2636 self.diagnostics.remove(ix);
2637 }
2638 } else {
2639 match ix {
2640 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2641 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2642 };
2643 }
2644 self.diagnostics_timestamp = lamport_timestamp;
2645 self.non_text_state_update_count += 1;
2646 self.text.lamport_clock.observe(lamport_timestamp);
2647 cx.notify();
2648 cx.emit(BufferEvent::DiagnosticsUpdated);
2649 }
2650 }
2651
2652 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2653 self.was_changed();
2654 cx.emit(BufferEvent::Operation {
2655 operation,
2656 is_local,
2657 });
2658 }
2659
2660 /// Removes the selections for a given peer.
2661 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2662 self.remote_selections.remove(&replica_id);
2663 cx.notify();
2664 }
2665
2666 /// Undoes the most recent transaction.
2667 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2668 let was_dirty = self.is_dirty();
2669 let old_version = self.version.clone();
2670
2671 if let Some((transaction_id, operation)) = self.text.undo() {
2672 self.send_operation(Operation::Buffer(operation), true, cx);
2673 self.did_edit(&old_version, was_dirty, cx);
2674 Some(transaction_id)
2675 } else {
2676 None
2677 }
2678 }
2679
2680 /// Manually undoes a specific transaction in the buffer's undo history.
2681 pub fn undo_transaction(
2682 &mut self,
2683 transaction_id: TransactionId,
2684 cx: &mut Context<Self>,
2685 ) -> bool {
2686 let was_dirty = self.is_dirty();
2687 let old_version = self.version.clone();
2688 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2689 self.send_operation(Operation::Buffer(operation), true, cx);
2690 self.did_edit(&old_version, was_dirty, cx);
2691 true
2692 } else {
2693 false
2694 }
2695 }
2696
2697 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2698 pub fn undo_to_transaction(
2699 &mut self,
2700 transaction_id: TransactionId,
2701 cx: &mut Context<Self>,
2702 ) -> bool {
2703 let was_dirty = self.is_dirty();
2704 let old_version = self.version.clone();
2705
2706 let operations = self.text.undo_to_transaction(transaction_id);
2707 let undone = !operations.is_empty();
2708 for operation in operations {
2709 self.send_operation(Operation::Buffer(operation), true, cx);
2710 }
2711 if undone {
2712 self.did_edit(&old_version, was_dirty, cx)
2713 }
2714 undone
2715 }
2716
2717 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2718 let was_dirty = self.is_dirty();
2719 let operation = self.text.undo_operations(counts);
2720 let old_version = self.version.clone();
2721 self.send_operation(Operation::Buffer(operation), true, cx);
2722 self.did_edit(&old_version, was_dirty, cx);
2723 }
2724
    /// Redoes the most recently undone transaction.
2726 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2727 let was_dirty = self.is_dirty();
2728 let old_version = self.version.clone();
2729
2730 if let Some((transaction_id, operation)) = self.text.redo() {
2731 self.send_operation(Operation::Buffer(operation), true, cx);
2732 self.did_edit(&old_version, was_dirty, cx);
2733 Some(transaction_id)
2734 } else {
2735 None
2736 }
2737 }
2738
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2740 pub fn redo_to_transaction(
2741 &mut self,
2742 transaction_id: TransactionId,
2743 cx: &mut Context<Self>,
2744 ) -> bool {
2745 let was_dirty = self.is_dirty();
2746 let old_version = self.version.clone();
2747
2748 let operations = self.text.redo_to_transaction(transaction_id);
2749 let redone = !operations.is_empty();
2750 for operation in operations {
2751 self.send_operation(Operation::Buffer(operation), true, cx);
2752 }
2753 if redone {
2754 self.did_edit(&old_version, was_dirty, cx)
2755 }
2756 redone
2757 }
2758
2759 /// Override current completion triggers with the user-provided completion triggers.
2760 pub fn set_completion_triggers(
2761 &mut self,
2762 server_id: LanguageServerId,
2763 triggers: BTreeSet<String>,
2764 cx: &mut Context<Self>,
2765 ) {
2766 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2767 if triggers.is_empty() {
2768 self.completion_triggers_per_language_server
2769 .remove(&server_id);
2770 self.completion_triggers = self
2771 .completion_triggers_per_language_server
2772 .values()
2773 .flat_map(|triggers| triggers.iter().cloned())
2774 .collect();
2775 } else {
2776 self.completion_triggers_per_language_server
2777 .insert(server_id, triggers.clone());
2778 self.completion_triggers.extend(triggers.iter().cloned());
2779 }
2780 self.send_operation(
2781 Operation::UpdateCompletionTriggers {
2782 triggers: triggers.into_iter().collect(),
2783 lamport_timestamp: self.completion_triggers_timestamp,
2784 server_id,
2785 },
2786 true,
2787 cx,
2788 );
2789 cx.notify();
2790 }
2791
2792 /// Returns a list of strings which trigger a completion menu for this language.
2793 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2794 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2795 &self.completion_triggers
2796 }
2797
2798 /// Call this directly after performing edits to prevent the preview tab
2799 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2800 /// to return false until there are additional edits.
2801 pub fn refresh_preview(&mut self) {
2802 self.preview_version = self.version.clone();
2803 }
2804
2805 /// Whether we should preserve the preview status of a tab containing this buffer.
2806 pub fn preserve_preview(&self) -> bool {
2807 !self.has_edits_since(&self.preview_version)
2808 }
2809}
2810
2811#[doc(hidden)]
2812#[cfg(any(test, feature = "test-support"))]
2813impl Buffer {
2814 pub fn edit_via_marked_text(
2815 &mut self,
2816 marked_string: &str,
2817 autoindent_mode: Option<AutoindentMode>,
2818 cx: &mut Context<Self>,
2819 ) {
2820 let edits = self.edits_for_marked_text(marked_string);
2821 self.edit(edits, autoindent_mode, cx);
2822 }
2823
2824 pub fn set_group_interval(&mut self, group_interval: Duration) {
2825 self.text.set_group_interval(group_interval);
2826 }
2827
2828 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2829 where
2830 T: rand::Rng,
2831 {
2832 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2833 let mut last_end = None;
2834 for _ in 0..old_range_count {
2835 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2836 break;
2837 }
2838
2839 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2840 let mut range = self.random_byte_range(new_start, rng);
2841 if rng.gen_bool(0.2) {
2842 mem::swap(&mut range.start, &mut range.end);
2843 }
2844 last_end = Some(range.end);
2845
2846 let new_text_len = rng.gen_range(0..10);
2847 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2848 new_text = new_text.to_uppercase();
2849
2850 edits.push((range, new_text));
2851 }
2852 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2853 self.edit(edits, None, cx);
2854 }
2855
2856 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2857 let was_dirty = self.is_dirty();
2858 let old_version = self.version.clone();
2859
2860 let ops = self.text.randomly_undo_redo(rng);
2861 if !ops.is_empty() {
2862 for op in ops {
2863 self.send_operation(Operation::Buffer(op), true, cx);
2864 self.did_edit(&old_version, was_dirty, cx);
2865 }
2866 }
2867 }
2868}
2869
2870impl EventEmitter<BufferEvent> for Buffer {}
2871
2872impl Deref for Buffer {
2873 type Target = TextBuffer;
2874
2875 fn deref(&self) -> &Self::Target {
2876 &self.text
2877 }
2878}
2879
2880impl BufferSnapshot {
2881 /// Returns [`IndentSize`] for a given line that respects user settings and
2882 /// language preferences.
2883 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2884 indent_size_for_line(self, row)
2885 }
2886
2887 /// Returns [`IndentSize`] for a given position that respects user settings
2888 /// and language preferences.
2889 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2890 let settings = language_settings(
2891 self.language_at(position).map(|l| l.name()),
2892 self.file(),
2893 cx,
2894 );
2895 if settings.hard_tabs {
2896 IndentSize::tab()
2897 } else {
2898 IndentSize::spaces(settings.tab_size.get())
2899 }
2900 }
2901
2902 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2903 /// is passed in as `single_indent_size`.
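    ///
    /// A hedged sketch, assuming 4-space indentation as the unit:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(0..10u32, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```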
2904 pub fn suggested_indents(
2905 &self,
2906 rows: impl Iterator<Item = u32>,
2907 single_indent_size: IndentSize,
2908 ) -> BTreeMap<u32, IndentSize> {
2909 let mut result = BTreeMap::new();
2910
2911 for row_range in contiguous_ranges(rows, 10) {
2912 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2913 Some(suggestions) => suggestions,
2914 _ => break,
2915 };
2916
2917 for (row, suggestion) in row_range.zip(suggestions) {
2918 let indent_size = if let Some(suggestion) = suggestion {
2919 result
2920 .get(&suggestion.basis_row)
2921 .copied()
2922 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2923 .with_delta(suggestion.delta, single_indent_size)
2924 } else {
2925 self.indent_size_for_line(row)
2926 };
2927
2928 result.insert(row, indent_size);
2929 }
2930 }
2931
2932 result
2933 }
2934
2935 fn suggest_autoindents(
2936 &self,
2937 row_range: Range<u32>,
2938 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2939 let config = &self.language.as_ref()?.config;
2940 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2941
2942 #[derive(Debug, Clone)]
2943 struct StartPosition {
2944 start: Point,
2945 suffix: SharedString,
2946 }
2947
2948 // Find the suggested indentation ranges based on the syntax tree.
2949 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2950 let end = Point::new(row_range.end, 0);
2951 let range = (start..end).to_offset(&self.text);
2952 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2953 Some(&grammar.indents_config.as_ref()?.query)
2954 });
2955 let indent_configs = matches
2956 .grammars()
2957 .iter()
2958 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2959 .collect::<Vec<_>>();
2960
2961 let mut indent_ranges = Vec::<Range<Point>>::new();
2962 let mut start_positions = Vec::<StartPosition>::new();
2963 let mut outdent_positions = Vec::<Point>::new();
2964 while let Some(mat) = matches.peek() {
2965 let mut start: Option<Point> = None;
2966 let mut end: Option<Point> = None;
2967
2968 let config = indent_configs[mat.grammar_index];
2969 for capture in mat.captures {
2970 if capture.index == config.indent_capture_ix {
2971 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2972 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2973 } else if Some(capture.index) == config.start_capture_ix {
2974 start = Some(Point::from_ts_point(capture.node.end_position()));
2975 } else if Some(capture.index) == config.end_capture_ix {
2976 end = Some(Point::from_ts_point(capture.node.start_position()));
2977 } else if Some(capture.index) == config.outdent_capture_ix {
2978 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2979 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2980 start_positions.push(StartPosition {
2981 start: Point::from_ts_point(capture.node.start_position()),
2982 suffix: suffix.clone(),
2983 });
2984 }
2985 }
2986
2987 matches.advance();
2988 if let Some((start, end)) = start.zip(end) {
2989 if start.row == end.row {
2990 continue;
2991 }
2992 let range = start..end;
2993 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2994 Err(ix) => indent_ranges.insert(ix, range),
2995 Ok(ix) => {
2996 let prev_range = &mut indent_ranges[ix];
2997 prev_range.end = prev_range.end.max(range.end);
2998 }
2999 }
3000 }
3001 }
3002
3003 let mut error_ranges = Vec::<Range<Point>>::new();
3004 let mut matches = self
3005 .syntax
3006 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3007 while let Some(mat) = matches.peek() {
3008 let node = mat.captures[0].node;
3009 let start = Point::from_ts_point(node.start_position());
3010 let end = Point::from_ts_point(node.end_position());
3011 let range = start..end;
3012 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3013 Ok(ix) | Err(ix) => ix,
3014 };
3015 let mut end_ix = ix;
3016 while let Some(existing_range) = error_ranges.get(end_ix) {
3017 if existing_range.end < end {
3018 end_ix += 1;
3019 } else {
3020 break;
3021 }
3022 }
3023 error_ranges.splice(ix..end_ix, [range]);
3024 matches.advance();
3025 }
3026
3027 outdent_positions.sort();
3028 for outdent_position in outdent_positions {
3029 // find the innermost indent range containing this outdent_position
3030 // set its end to the outdent position
3031 if let Some(range_to_truncate) = indent_ranges
3032 .iter_mut()
3033 .filter(|indent_range| indent_range.contains(&outdent_position))
3034 .next_back()
3035 {
3036 range_to_truncate.end = outdent_position;
3037 }
3038 }
3039
3040 start_positions.sort_by_key(|b| b.start);
3041
        // Find the suggested indentation increases and decreases based on regexes.
3043 let mut regex_outdent_map = HashMap::default();
3044 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3045 let mut start_positions_iter = start_positions.iter().peekable();
3046
3047 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3048 self.for_each_line(
3049 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3050 ..Point::new(row_range.end, 0),
3051 |row, line| {
3052 if config
3053 .decrease_indent_pattern
3054 .as_ref()
3055 .is_some_and(|regex| regex.is_match(line))
3056 {
3057 indent_change_rows.push((row, Ordering::Less));
3058 }
3059 if config
3060 .increase_indent_pattern
3061 .as_ref()
3062 .is_some_and(|regex| regex.is_match(line))
3063 {
3064 indent_change_rows.push((row + 1, Ordering::Greater));
3065 }
3066 while let Some(pos) = start_positions_iter.peek() {
3067 if pos.start.row < row {
3068 let pos = start_positions_iter.next().unwrap();
3069 last_seen_suffix
3070 .entry(pos.suffix.to_string())
3071 .or_default()
3072 .push(pos.start);
3073 } else {
3074 break;
3075 }
3076 }
3077 for rule in &config.decrease_indent_patterns {
3078 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3079 let row_start_column = self.indent_size_for_line(row).len;
3080 let basis_row = rule
3081 .valid_after
3082 .iter()
3083 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3084 .flatten()
3085 .filter(|start_point| start_point.column <= row_start_column)
3086 .max_by_key(|start_point| start_point.row);
3087 if let Some(outdent_to_row) = basis_row {
3088 regex_outdent_map.insert(row, outdent_to_row.row);
3089 }
3090 break;
3091 }
3092 }
3093 },
3094 );
3095
3096 let mut indent_changes = indent_change_rows.into_iter().peekable();
3097 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3098 prev_non_blank_row.unwrap_or(0)
3099 } else {
3100 row_range.start.saturating_sub(1)
3101 };
3102
3103 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3104 Some(row_range.map(move |row| {
3105 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3106
3107 let mut indent_from_prev_row = false;
3108 let mut outdent_from_prev_row = false;
3109 let mut outdent_to_row = u32::MAX;
3110 let mut from_regex = false;
3111
3112 while let Some((indent_row, delta)) = indent_changes.peek() {
3113 match indent_row.cmp(&row) {
3114 Ordering::Equal => match delta {
3115 Ordering::Less => {
3116 from_regex = true;
3117 outdent_from_prev_row = true
3118 }
3119 Ordering::Greater => {
3120 indent_from_prev_row = true;
3121 from_regex = true
3122 }
3123 _ => {}
3124 },
3125
3126 Ordering::Greater => break,
3127 Ordering::Less => {}
3128 }
3129
3130 indent_changes.next();
3131 }
3132
3133 for range in &indent_ranges {
3134 if range.start.row >= row {
3135 break;
3136 }
3137 if range.start.row == prev_row && range.end > row_start {
3138 indent_from_prev_row = true;
3139 }
3140 if range.end > prev_row_start && range.end <= row_start {
3141 outdent_to_row = outdent_to_row.min(range.start.row);
3142 }
3143 }
3144
3145 if let Some(basis_row) = regex_outdent_map.get(&row) {
3146 indent_from_prev_row = false;
3147 outdent_to_row = *basis_row;
3148 from_regex = true;
3149 }
3150
3151 let within_error = error_ranges
3152 .iter()
3153 .any(|e| e.start.row < row && e.end > row_start);
3154
3155 let suggestion = if outdent_to_row == prev_row
3156 || (outdent_from_prev_row && indent_from_prev_row)
3157 {
3158 Some(IndentSuggestion {
3159 basis_row: prev_row,
3160 delta: Ordering::Equal,
3161 within_error: within_error && !from_regex,
3162 })
3163 } else if indent_from_prev_row {
3164 Some(IndentSuggestion {
3165 basis_row: prev_row,
3166 delta: Ordering::Greater,
3167 within_error: within_error && !from_regex,
3168 })
3169 } else if outdent_to_row < prev_row {
3170 Some(IndentSuggestion {
3171 basis_row: outdent_to_row,
3172 delta: Ordering::Equal,
3173 within_error: within_error && !from_regex,
3174 })
3175 } else if outdent_from_prev_row {
3176 Some(IndentSuggestion {
3177 basis_row: prev_row,
3178 delta: Ordering::Less,
3179 within_error: within_error && !from_regex,
3180 })
3181 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3182 {
3183 Some(IndentSuggestion {
3184 basis_row: prev_row,
3185 delta: Ordering::Equal,
3186 within_error: within_error && !from_regex,
3187 })
3188 } else {
3189 None
3190 };
3191
3192 prev_row = row;
3193 prev_row_start = row_start;
3194 suggestion
3195 }))
3196 }
3197
3198 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3199 while row > 0 {
3200 row -= 1;
3201 if !self.is_line_blank(row) {
3202 return Some(row);
3203 }
3204 }
3205 None
3206 }
3207
3208 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3209 let captures = self.syntax.captures(range, &self.text, |grammar| {
3210 grammar.highlights_query.as_ref()
3211 });
3212 let highlight_maps = captures
3213 .grammars()
3214 .iter()
3215 .map(|grammar| grammar.highlight_map())
3216 .collect();
3217 (captures, highlight_maps)
3218 }
3219
3220 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3221 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3222 /// returned in chunks where each chunk has a single syntax highlighting style and
3223 /// diagnostic status.
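    ///
    /// A hedged sketch of iterating over highlighted chunks (the chunk field names
    /// are assumptions about this crate's chunk type):
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     let text = chunk.text;
    ///     let highlight = chunk.syntax_highlight_id;
    ///     // Render `text` with `highlight`, if any.
    /// }
    /// ```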
3224 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3225 let range = range.start.to_offset(self)..range.end.to_offset(self);
3226
3227 let mut syntax = None;
3228 if language_aware {
3229 syntax = Some(self.get_highlights(range.clone()));
3230 }
3231 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3232 let diagnostics = language_aware;
3233 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3234 }
3235
3236 pub fn highlighted_text_for_range<T: ToOffset>(
3237 &self,
3238 range: Range<T>,
3239 override_style: Option<HighlightStyle>,
3240 syntax_theme: &SyntaxTheme,
3241 ) -> HighlightedText {
3242 HighlightedText::from_buffer_range(
3243 range,
3244 &self.text,
3245 &self.syntax,
3246 override_style,
3247 syntax_theme,
3248 )
3249 }
3250
3251 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3253 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3254 let mut line = String::new();
3255 let mut row = range.start.row;
3256 for chunk in self
3257 .as_rope()
3258 .chunks_in_range(range.to_offset(self))
3259 .chain(["\n"])
3260 {
3261 for (newline_ix, text) in chunk.split('\n').enumerate() {
3262 if newline_ix > 0 {
3263 callback(row, &line);
3264 row += 1;
3265 line.clear();
3266 }
3267 line.push_str(text);
3268 }
3269 }
3270 }
3271
3272 /// Iterates over every [`SyntaxLayer`] in the buffer.
3273 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3274 self.syntax
3275 .layers_for_range(0..self.len(), &self.text, true)
3276 }
3277
3278 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3279 let offset = position.to_offset(self);
3280 self.syntax
3281 .layers_for_range(offset..offset, &self.text, false)
3282 .filter(|l| l.node().end_byte() > offset)
3283 .last()
3284 }
3285
3286 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3287 &self,
3288 range: Range<D>,
3289 ) -> Option<SyntaxLayer<'_>> {
3290 let range = range.to_offset(self);
3291 self.syntax
3292 .layers_for_range(range, &self.text, false)
3293 .max_by(|a, b| {
3294 if a.depth != b.depth {
3295 a.depth.cmp(&b.depth)
3296 } else if a.offset.0 != b.offset.0 {
3297 a.offset.0.cmp(&b.offset.0)
3298 } else {
3299 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3300 }
3301 })
3302 }
3303
3304 /// Returns the main [`Language`].
3305 pub fn language(&self) -> Option<&Arc<Language>> {
3306 self.language.as_ref()
3307 }
3308
3309 /// Returns the [`Language`] at the given location.
3310 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3311 self.syntax_layer_at(position)
3312 .map(|info| info.language)
3313 .or(self.language.as_ref())
3314 }
3315
3316 /// Returns the settings for the language at the given location.
3317 pub fn settings_at<'a, D: ToOffset>(
3318 &'a self,
3319 position: D,
3320 cx: &'a App,
3321 ) -> Cow<'a, LanguageSettings> {
3322 language_settings(
3323 self.language_at(position).map(|l| l.name()),
3324 self.file.as_ref(),
3325 cx,
3326 )
3327 }
3328
3329 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3330 CharClassifier::new(self.language_scope_at(point))
3331 }
3332
3333 /// Returns the [`LanguageScope`] at the given location.
3334 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3335 let offset = position.to_offset(self);
3336 let mut scope = None;
3337 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3338
3339 // Use the layer that has the smallest node intersecting the given point.
3340 for layer in self
3341 .syntax
3342 .layers_for_range(offset..offset, &self.text, false)
3343 {
3344 let mut cursor = layer.node().walk();
3345
3346 let mut range = None;
3347 loop {
3348 let child_range = cursor.node().byte_range();
3349 if !child_range.contains(&offset) {
3350 break;
3351 }
3352
3353 range = Some(child_range);
3354 if cursor.goto_first_child_for_byte(offset).is_none() {
3355 break;
3356 }
3357 }
3358
3359 if let Some(range) = range
3360 && smallest_range_and_depth.as_ref().is_none_or(
3361 |(smallest_range, smallest_range_depth)| {
3362 if layer.depth > *smallest_range_depth {
3363 true
3364 } else if layer.depth == *smallest_range_depth {
3365 range.len() < smallest_range.len()
3366 } else {
3367 false
3368 }
3369 },
3370 )
3371 {
3372 smallest_range_and_depth = Some((range, layer.depth));
3373 scope = Some(LanguageScope {
3374 language: layer.language.clone(),
3375 override_id: layer.override_id(offset, &self.text),
3376 });
3377 }
3378 }
3379
3380 scope.or_else(|| {
3381 self.language.clone().map(|language| LanguageScope {
3382 language,
3383 override_id: None,
3384 })
3385 })
3386 }
3387
3388 /// Returns a tuple of the range and character kind of the word
3389 /// surrounding the given position.
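    ///
    /// A hedged sketch: with buffer text `"hello world"`, an offset inside `world`
    /// resolves to that word's range.
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(8, false);
    /// assert_eq!(range, 6..11);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```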
3390 pub fn surrounding_word<T: ToOffset>(
3391 &self,
3392 start: T,
3393 for_completion: bool,
3394 ) -> (Range<usize>, Option<CharKind>) {
3395 let mut start = start.to_offset(self);
3396 let mut end = start;
3397 let mut next_chars = self.chars_at(start).take(128).peekable();
3398 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3399
3400 let classifier = self
3401 .char_classifier_at(start)
3402 .for_completion(for_completion);
3403 let word_kind = cmp::max(
3404 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3405 next_chars.peek().copied().map(|c| classifier.kind(c)),
3406 );
3407
3408 for ch in prev_chars {
3409 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3410 start -= ch.len_utf8();
3411 } else {
3412 break;
3413 }
3414 }
3415
3416 for ch in next_chars {
3417 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3418 end += ch.len_utf8();
3419 } else {
3420 break;
3421 }
3422 }
3423
3424 (start..end, word_kind)
3425 }
3426
3427 /// Returns the closest syntax node enclosing the given range.
3428 pub fn syntax_ancestor<'a, T: ToOffset>(
3429 &'a self,
3430 range: Range<T>,
3431 ) -> Option<tree_sitter::Node<'a>> {
3432 let range = range.start.to_offset(self)..range.end.to_offset(self);
3433 let mut result: Option<tree_sitter::Node<'a>> = None;
3434 'outer: for layer in self
3435 .syntax
3436 .layers_for_range(range.clone(), &self.text, true)
3437 {
3438 let mut cursor = layer.node().walk();
3439
3440 // Descend to the first leaf that touches the start of the range.
3441 //
3442 // If the range is non-empty and the current node ends exactly at the start,
3443 // move to the next sibling to find a node that extends beyond the start.
3444 //
3445 // If the range is empty and the current node starts after the range position,
3446 // move to the previous sibling to find the node that contains the position.
3447 while cursor.goto_first_child_for_byte(range.start).is_some() {
3448 if !range.is_empty() && cursor.node().end_byte() == range.start {
3449 cursor.goto_next_sibling();
3450 }
3451 if range.is_empty() && cursor.node().start_byte() > range.start {
3452 cursor.goto_previous_sibling();
3453 }
3454 }
3455
3456 // Ascend to the smallest ancestor that strictly contains the range.
3457 loop {
3458 let node_range = cursor.node().byte_range();
3459 if node_range.start <= range.start
3460 && node_range.end >= range.end
3461 && node_range.len() > range.len()
3462 {
3463 break;
3464 }
3465 if !cursor.goto_parent() {
3466 continue 'outer;
3467 }
3468 }
3469
3470 let left_node = cursor.node();
3471 let mut layer_result = left_node;
3472
3473 // For an empty range, try to find another node immediately to the right of the range.
3474 if left_node.end_byte() == range.start {
3475 let mut right_node = None;
3476 while !cursor.goto_next_sibling() {
3477 if !cursor.goto_parent() {
3478 break;
3479 }
3480 }
3481
3482 while cursor.node().start_byte() == range.start {
3483 right_node = Some(cursor.node());
3484 if !cursor.goto_first_child() {
3485 break;
3486 }
3487 }
3488
3489 // If there is a candidate node on both sides of the (empty) range, then
3490 // decide between the two by favoring a named node over an anonymous token.
3491 // If both nodes are the same in that regard, favor the right one.
3492 if let Some(right_node) = right_node
3493 && (right_node.is_named() || !left_node.is_named())
3494 {
3495 layer_result = right_node;
3496 }
3497 }
3498
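            // Across syntax layers, prefer the smallest node found so far (e.g. a node from an
            // injected language over the enclosing document's node).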
3499 if let Some(previous_result) = &result
3500 && previous_result.byte_range().len() < layer_result.byte_range().len()
3501 {
3502 continue;
3503 }
3504 result = Some(layer_result);
3505 }
3506
3507 result
3508 }
3509
    /// Returns the outermost syntax node that starts on the same row as the given position.
3511 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3512 let start_offset = position.to_offset(self);
3513
3514 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3515
3516 let layer = self
3517 .syntax
3518 .layers_for_range(start_offset..start_offset, &self.text, true)
3519 .next()?;
3520
3521 let mut cursor = layer.node().walk();
3522
3523 // Descend to the first leaf that touches the start of the range.
3524 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3525 if cursor.node().end_byte() == start_offset {
3526 cursor.goto_next_sibling();
3527 }
3528 }
3529
        // Ascend to the outermost ancestor that still starts on the same row.
        let mut node = cursor.node();
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
            node = cursor.node();
        }

        Some(node)
3538 }
3539
3540 /// Returns the outline for the buffer.
3541 ///
3542 /// This method allows passing an optional [`SyntaxTheme`] to
3543 /// syntax-highlight the returned symbols.
3544 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3545 self.outline_items_containing(0..self.len(), true, theme)
3546 .map(Outline::new)
3547 }
3548
3549 /// Returns all the symbols that contain the given position.
3550 ///
3551 /// This method allows passing an optional [`SyntaxTheme`] to
3552 /// syntax-highlight the returned symbols.
3553 pub fn symbols_containing<T: ToOffset>(
3554 &self,
3555 position: T,
3556 theme: Option<&SyntaxTheme>,
3557 ) -> Option<Vec<OutlineItem<Anchor>>> {
3558 let position = position.to_offset(self);
3559 let mut items = self.outline_items_containing(
3560 position.saturating_sub(1)..self.len().min(position + 1),
3561 false,
3562 theme,
3563 )?;
3564 let mut prev_depth = None;
3565 items.retain(|item| {
3566 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3567 prev_depth = Some(item.depth);
3568 result
3569 });
3570 Some(items)
3571 }
3572
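    /// Returns the point range of an outline item (such as a function or type definition) whose
    /// syntax node intersects the given range, if any.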
3573 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3574 let range = range.to_offset(self);
3575 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3576 grammar.outline_config.as_ref().map(|c| &c.query)
3577 });
3578 let configs = matches
3579 .grammars()
3580 .iter()
3581 .map(|g| g.outline_config.as_ref().unwrap())
3582 .collect::<Vec<_>>();
3583
3584 while let Some(mat) = matches.peek() {
3585 let config = &configs[mat.grammar_index];
3586 let containing_item_node = maybe!({
3587 let item_node = mat.captures.iter().find_map(|cap| {
3588 if cap.index == config.item_capture_ix {
3589 Some(cap.node)
3590 } else {
3591 None
3592 }
3593 })?;
3594
3595 let item_byte_range = item_node.byte_range();
3596 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3597 None
3598 } else {
3599 Some(item_node)
3600 }
3601 });
3602
3603 if let Some(item_node) = containing_item_node {
3604 return Some(
3605 Point::from_ts_point(item_node.start_position())
3606 ..Point::from_ts_point(item_node.end_position()),
3607 );
3608 }
3609
3610 matches.advance();
3611 }
3612 None
3613 }
3614
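    /// Returns the outline items whose syntax nodes intersect the given range, ordered by
    /// position, with each item's depth derived from how many other items contain it.
    ///
    /// This method allows passing an optional [`SyntaxTheme`] to
    /// syntax-highlight the returned symbols.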
3615 pub fn outline_items_containing<T: ToOffset>(
3616 &self,
3617 range: Range<T>,
3618 include_extra_context: bool,
3619 theme: Option<&SyntaxTheme>,
3620 ) -> Option<Vec<OutlineItem<Anchor>>> {
3621 let range = range.to_offset(self);
3622 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3623 grammar.outline_config.as_ref().map(|c| &c.query)
3624 });
3625 let configs = matches
3626 .grammars()
3627 .iter()
3628 .map(|g| g.outline_config.as_ref().unwrap())
3629 .collect::<Vec<_>>();
3630
3631 let mut items = Vec::new();
3632 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3633 while let Some(mat) = matches.peek() {
3634 let config = &configs[mat.grammar_index];
3635 if let Some(item) =
3636 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3637 {
3638 items.push(item);
3639 } else if let Some(capture) = mat
3640 .captures
3641 .iter()
3642 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3643 {
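                // Record annotation rows so that they can later be attached to the outline item
                // that immediately follows them.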
3644 let capture_range = capture.node.start_position()..capture.node.end_position();
3645 let mut capture_row_range =
3646 capture_range.start.row as u32..capture_range.end.row as u32;
3647 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3648 {
3649 capture_row_range.end -= 1;
3650 }
3651 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3652 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3653 last_row_range.end = capture_row_range.end;
3654 } else {
3655 annotation_row_ranges.push(capture_row_range);
3656 }
3657 } else {
3658 annotation_row_ranges.push(capture_row_range);
3659 }
3660 }
3661 matches.advance();
3662 }
3663
3664 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3665
3666 // Assign depths based on containment relationships and convert to anchors.
3667 let mut item_ends_stack = Vec::<Point>::new();
3668 let mut anchor_items = Vec::new();
3669 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3670 for item in items {
3671 while let Some(last_end) = item_ends_stack.last().copied() {
3672 if last_end < item.range.end {
3673 item_ends_stack.pop();
3674 } else {
3675 break;
3676 }
3677 }
3678
3679 let mut annotation_row_range = None;
3680 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3681 let row_preceding_item = item.range.start.row.saturating_sub(1);
3682 if next_annotation_row_range.end < row_preceding_item {
3683 annotation_row_ranges.next();
3684 } else {
3685 if next_annotation_row_range.end == row_preceding_item {
3686 annotation_row_range = Some(next_annotation_row_range.clone());
3687 annotation_row_ranges.next();
3688 }
3689 break;
3690 }
3691 }
3692
3693 anchor_items.push(OutlineItem {
3694 depth: item_ends_stack.len(),
3695 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3696 text: item.text,
3697 highlight_ranges: item.highlight_ranges,
3698 name_ranges: item.name_ranges,
3699 body_range: item.body_range.map(|body_range| {
3700 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3701 }),
3702 annotation_range: annotation_row_range.map(|annotation_range| {
3703 self.anchor_after(Point::new(annotation_range.start, 0))
3704 ..self.anchor_before(Point::new(
3705 annotation_range.end,
3706 self.line_len(annotation_range.end),
3707 ))
3708 }),
3709 });
3710 item_ends_stack.push(item.range.end);
3711 }
3712
3713 Some(anchor_items)
3714 }
3715
3716 fn next_outline_item(
3717 &self,
3718 config: &OutlineConfig,
3719 mat: &SyntaxMapMatch,
3720 range: &Range<usize>,
3721 include_extra_context: bool,
3722 theme: Option<&SyntaxTheme>,
3723 ) -> Option<OutlineItem<Point>> {
3724 let item_node = mat.captures.iter().find_map(|cap| {
3725 if cap.index == config.item_capture_ix {
3726 Some(cap.node)
3727 } else {
3728 None
3729 }
3730 })?;
3731
3732 let item_byte_range = item_node.byte_range();
3733 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3734 return None;
3735 }
3736 let item_point_range = Point::from_ts_point(item_node.start_position())
3737 ..Point::from_ts_point(item_node.end_position());
3738
3739 let mut open_point = None;
3740 let mut close_point = None;
3741 let mut buffer_ranges = Vec::new();
3742 for capture in mat.captures {
3743 let node_is_name;
3744 if capture.index == config.name_capture_ix {
3745 node_is_name = true;
3746 } else if Some(capture.index) == config.context_capture_ix
3747 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3748 {
3749 node_is_name = false;
3750 } else {
3751 if Some(capture.index) == config.open_capture_ix {
3752 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3753 } else if Some(capture.index) == config.close_capture_ix {
3754 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3755 }
3756
3757 continue;
3758 }
3759
3760 let mut range = capture.node.start_byte()..capture.node.end_byte();
3761 let start = capture.node.start_position();
3762 if capture.node.end_position().row > start.row {
3763 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3764 }
3765
3766 if !range.is_empty() {
3767 buffer_ranges.push((range, node_is_name));
3768 }
3769 }
3770 if buffer_ranges.is_empty() {
3771 return None;
3772 }
3773 let mut text = String::new();
3774 let mut highlight_ranges = Vec::new();
3775 let mut name_ranges = Vec::new();
3776 let mut chunks = self.chunks(
3777 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3778 true,
3779 );
3780 let mut last_buffer_range_end = 0;
3781
3782 for (buffer_range, is_name) in buffer_ranges {
3783 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3784 if space_added {
3785 text.push(' ');
3786 }
3787 let before_append_len = text.len();
3788 let mut offset = buffer_range.start;
3789 chunks.seek(buffer_range.clone());
3790 for mut chunk in chunks.by_ref() {
3791 if chunk.text.len() > buffer_range.end - offset {
3792 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3793 offset = buffer_range.end;
3794 } else {
3795 offset += chunk.text.len();
3796 }
3797 let style = chunk
3798 .syntax_highlight_id
3799 .zip(theme)
3800 .and_then(|(highlight, theme)| highlight.style(theme));
3801 if let Some(style) = style {
3802 let start = text.len();
3803 let end = start + chunk.text.len();
3804 highlight_ranges.push((start..end, style));
3805 }
3806 text.push_str(chunk.text);
3807 if offset >= buffer_range.end {
3808 break;
3809 }
3810 }
3811 if is_name {
3812 let after_append_len = text.len();
3813 let start = if space_added && !name_ranges.is_empty() {
3814 before_append_len - 1
3815 } else {
3816 before_append_len
3817 };
3818 name_ranges.push(start..after_append_len);
3819 }
3820 last_buffer_range_end = buffer_range.end;
3821 }
3822
3823 Some(OutlineItem {
3824 depth: 0, // We'll calculate the depth later
3825 range: item_point_range,
3826 text,
3827 highlight_ranges,
3828 name_ranges,
3829 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3830 annotation_range: None,
3831 })
3832 }
3833
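    /// Returns the ranges of function bodies intersecting the given range, for use when folding
    /// function bodies.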
3834 pub fn function_body_fold_ranges<T: ToOffset>(
3835 &self,
3836 within: Range<T>,
3837 ) -> impl Iterator<Item = Range<usize>> + '_ {
3838 self.text_object_ranges(within, TreeSitterOptions::default())
3839 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3840 }
3841
3842 /// For each grammar in the language, runs the provided
3843 /// [`tree_sitter::Query`] against the given range.
3844 pub fn matches(
3845 &self,
3846 range: Range<usize>,
3847 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3848 ) -> SyntaxMapMatches<'_> {
3849 self.syntax.matches(range, self, query)
3850 }
3851
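    /// Returns all bracket pairs from the languages' bracket queries that overlap the given
    /// range, including newline-only pairs.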
3852 pub fn all_bracket_ranges(
3853 &self,
3854 range: Range<usize>,
3855 ) -> impl Iterator<Item = BracketMatch> + '_ {
3856 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3857 grammar.brackets_config.as_ref().map(|c| &c.query)
3858 });
3859 let configs = matches
3860 .grammars()
3861 .iter()
3862 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3863 .collect::<Vec<_>>();
3864
3865 iter::from_fn(move || {
3866 while let Some(mat) = matches.peek() {
3867 let mut open = None;
3868 let mut close = None;
3869 let config = &configs[mat.grammar_index];
3870 let pattern = &config.patterns[mat.pattern_index];
3871 for capture in mat.captures {
3872 if capture.index == config.open_capture_ix {
3873 open = Some(capture.node.byte_range());
3874 } else if capture.index == config.close_capture_ix {
3875 close = Some(capture.node.byte_range());
3876 }
3877 }
3878
3879 matches.advance();
3880
3881 let Some((open_range, close_range)) = open.zip(close) else {
3882 continue;
3883 };
3884
3885 let bracket_range = open_range.start..=close_range.end;
3886 if !bracket_range.overlaps(&range) {
3887 continue;
3888 }
3889
3890 return Some(BracketMatch {
3891 open_range,
3892 close_range,
3893 newline_only: pattern.newline_only,
3894 });
3895 }
3896 None
3897 })
3898 }
3899
    /// Returns bracket range pairs overlapping or adjacent to `range`.
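    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` and a byte range `selection` are
    /// in scope):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection.clone()) {
    ///     // `pair.open_range` and `pair.close_range` are the byte ranges of the
    ///     // opening and closing brackets.
    /// }
    /// ```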
3901 pub fn bracket_ranges<T: ToOffset>(
3902 &self,
3903 range: Range<T>,
3904 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one byte in each direction so that bracket pairs adjacent to the
        // range are also matched.
3906 let range = range.start.to_offset(self).saturating_sub(1)
3907 ..self.len().min(range.end.to_offset(self) + 1);
3908 self.all_bracket_ranges(range)
3909 .filter(|pair| !pair.newline_only)
3910 }
3911
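    /// Returns the byte ranges and kinds of debugger text objects, produced by the languages'
    /// debug-variables queries, that overlap the given range.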
3912 pub fn debug_variables_query<T: ToOffset>(
3913 &self,
3914 range: Range<T>,
3915 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3916 let range = range.start.to_offset(self).saturating_sub(1)
3917 ..self.len().min(range.end.to_offset(self) + 1);
3918
3919 let mut matches = self.syntax.matches_with_options(
3920 range.clone(),
3921 &self.text,
3922 TreeSitterOptions::default(),
3923 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3924 );
3925
3926 let configs = matches
3927 .grammars()
3928 .iter()
3929 .map(|grammar| grammar.debug_variables_config.as_ref())
3930 .collect::<Vec<_>>();
3931
3932 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3933
3934 iter::from_fn(move || {
3935 loop {
3936 while let Some(capture) = captures.pop() {
3937 if capture.0.overlaps(&range) {
3938 return Some(capture);
3939 }
3940 }
3941
3942 let mat = matches.peek()?;
3943
3944 let Some(config) = configs[mat.grammar_index].as_ref() else {
3945 matches.advance();
3946 continue;
3947 };
3948
3949 for capture in mat.captures {
3950 let Some(ix) = config
3951 .objects_by_capture_ix
3952 .binary_search_by_key(&capture.index, |e| e.0)
3953 .ok()
3954 else {
3955 continue;
3956 };
3957 let text_object = config.objects_by_capture_ix[ix].1;
3958 let byte_range = capture.node.byte_range();
3959
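                    // Within a single match, merge captures of the same kind into one contiguous
                    // range.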
3960 let mut found = false;
3961 for (range, existing) in captures.iter_mut() {
3962 if existing == &text_object {
3963 range.start = range.start.min(byte_range.start);
3964 range.end = range.end.max(byte_range.end);
3965 found = true;
3966 break;
3967 }
3968 }
3969
3970 if !found {
3971 captures.push((byte_range, text_object));
3972 }
3973 }
3974
3975 matches.advance();
3976 }
3977 })
3978 }
3979
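    /// Returns the byte ranges and kinds of text objects, produced by the languages' text-object
    /// queries, that overlap the given range.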
3980 pub fn text_object_ranges<T: ToOffset>(
3981 &self,
3982 range: Range<T>,
3983 options: TreeSitterOptions,
3984 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3985 let range = range.start.to_offset(self).saturating_sub(1)
3986 ..self.len().min(range.end.to_offset(self) + 1);
3987
3988 let mut matches =
3989 self.syntax
3990 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3991 grammar.text_object_config.as_ref().map(|c| &c.query)
3992 });
3993
3994 let configs = matches
3995 .grammars()
3996 .iter()
3997 .map(|grammar| grammar.text_object_config.as_ref())
3998 .collect::<Vec<_>>();
3999
4000 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4001
4002 iter::from_fn(move || {
4003 loop {
4004 while let Some(capture) = captures.pop() {
4005 if capture.0.overlaps(&range) {
4006 return Some(capture);
4007 }
4008 }
4009
4010 let mat = matches.peek()?;
4011
4012 let Some(config) = configs[mat.grammar_index].as_ref() else {
4013 matches.advance();
4014 continue;
4015 };
4016
4017 for capture in mat.captures {
4018 let Some(ix) = config
4019 .text_objects_by_capture_ix
4020 .binary_search_by_key(&capture.index, |e| e.0)
4021 .ok()
4022 else {
4023 continue;
4024 };
4025 let text_object = config.text_objects_by_capture_ix[ix].1;
4026 let byte_range = capture.node.byte_range();
4027
4028 let mut found = false;
4029 for (range, existing) in captures.iter_mut() {
4030 if existing == &text_object {
4031 range.start = range.start.min(byte_range.start);
4032 range.end = range.end.max(byte_range.end);
4033 found = true;
4034 break;
4035 }
4036 }
4037
4038 if !found {
4039 captures.push((byte_range, text_object));
4040 }
4041 }
4042
4043 matches.advance();
4044 }
4045 })
4046 }
4047
    /// Returns the bracket pair ranges that enclose the given range.
4049 pub fn enclosing_bracket_ranges<T: ToOffset>(
4050 &self,
4051 range: Range<T>,
4052 ) -> impl Iterator<Item = BracketMatch> + '_ {
4053 let range = range.start.to_offset(self)..range.end.to_offset(self);
4054
4055 self.bracket_ranges(range.clone()).filter(move |pair| {
4056 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4057 })
4058 }
4059
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
4063 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4064 &self,
4065 range: Range<T>,
4066 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4067 ) -> Option<(Range<usize>, Range<usize>)> {
4068 let range = range.start.to_offset(self)..range.end.to_offset(self);
4069
4070 // Get the ranges of the innermost pair of brackets.
4071 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4072
4073 for pair in self.enclosing_bracket_ranges(range) {
4074 if let Some(range_filter) = range_filter
4075 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4076 {
4077 continue;
4078 }
4079
4080 let len = pair.close_range.end - pair.open_range.start;
4081
4082 if let Some((existing_open, existing_close)) = &result {
4083 let existing_len = existing_close.end - existing_open.start;
4084 if len > existing_len {
4085 continue;
4086 }
4087 }
4088
4089 result = Some((pair.open_range, pair.close_range));
4090 }
4091
4092 result
4093 }
4094
4095 /// Returns anchor ranges for any matches of the redaction query.
4096 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4097 /// will be run on the relevant section of the buffer.
4098 pub fn redacted_ranges<T: ToOffset>(
4099 &self,
4100 range: Range<T>,
4101 ) -> impl Iterator<Item = Range<usize>> + '_ {
4102 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4103 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4104 grammar
4105 .redactions_config
4106 .as_ref()
4107 .map(|config| &config.query)
4108 });
4109
4110 let configs = syntax_matches
4111 .grammars()
4112 .iter()
4113 .map(|grammar| grammar.redactions_config.as_ref())
4114 .collect::<Vec<_>>();
4115
4116 iter::from_fn(move || {
4117 let redacted_range = syntax_matches
4118 .peek()
4119 .and_then(|mat| {
4120 configs[mat.grammar_index].and_then(|config| {
4121 mat.captures
4122 .iter()
4123 .find(|capture| capture.index == config.redaction_capture_ix)
4124 })
4125 })
4126 .map(|mat| mat.node.byte_range());
4127 syntax_matches.advance();
4128 redacted_range
4129 })
4130 }
4131
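    /// Returns the ranges of language injections that intersect the given range, along with the
    /// injected language.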
4132 pub fn injections_intersecting_range<T: ToOffset>(
4133 &self,
4134 range: Range<T>,
4135 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4136 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4137
4138 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4139 grammar
4140 .injection_config
4141 .as_ref()
4142 .map(|config| &config.query)
4143 });
4144
4145 let configs = syntax_matches
4146 .grammars()
4147 .iter()
4148 .map(|grammar| grammar.injection_config.as_ref())
4149 .collect::<Vec<_>>();
4150
4151 iter::from_fn(move || {
4152 let ranges = syntax_matches.peek().and_then(|mat| {
4153 let config = &configs[mat.grammar_index]?;
4154 let content_capture_range = mat.captures.iter().find_map(|capture| {
4155 if capture.index == config.content_capture_ix {
4156 Some(capture.node.byte_range())
4157 } else {
4158 None
4159 }
4160 })?;
4161 let language = self.language_at(content_capture_range.start)?;
4162 Some((content_capture_range, language))
4163 });
4164 syntax_matches.advance();
4165 ranges
4166 })
4167 }
4168
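    /// Returns the runnables (such as tests) detected by the languages' runnables queries within
    /// the given offset range.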
4169 pub fn runnable_ranges(
4170 &self,
4171 offset_range: Range<usize>,
4172 ) -> impl Iterator<Item = RunnableRange> + '_ {
4173 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4174 grammar.runnable_config.as_ref().map(|config| &config.query)
4175 });
4176
4177 let test_configs = syntax_matches
4178 .grammars()
4179 .iter()
4180 .map(|grammar| grammar.runnable_config.as_ref())
4181 .collect::<Vec<_>>();
4182
4183 iter::from_fn(move || {
4184 loop {
4185 let mat = syntax_matches.peek()?;
4186
4187 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4188 let mut run_range = None;
4189 let full_range = mat.captures.iter().fold(
4190 Range {
4191 start: usize::MAX,
4192 end: 0,
4193 },
4194 |mut acc, next| {
4195 let byte_range = next.node.byte_range();
4196 if acc.start > byte_range.start {
4197 acc.start = byte_range.start;
4198 }
4199 if acc.end < byte_range.end {
4200 acc.end = byte_range.end;
4201 }
4202 acc
4203 },
4204 );
4205 if full_range.start > full_range.end {
4206 // We did not find a full spanning range of this match.
4207 return None;
4208 }
4209 let extra_captures: SmallVec<[_; 1]> =
4210 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4211 test_configs
4212 .extra_captures
4213 .get(capture.index as usize)
4214 .cloned()
4215 .and_then(|tag_name| match tag_name {
4216 RunnableCapture::Named(name) => {
4217 Some((capture.node.byte_range(), name))
4218 }
4219 RunnableCapture::Run => {
4220 let _ = run_range.insert(capture.node.byte_range());
4221 None
4222 }
4223 })
4224 }));
4225 let run_range = run_range?;
4226 let tags = test_configs
4227 .query
4228 .property_settings(mat.pattern_index)
4229 .iter()
4230 .filter_map(|property| {
4231 if *property.key == *"tag" {
4232 property
4233 .value
4234 .as_ref()
4235 .map(|value| RunnableTag(value.to_string().into()))
4236 } else {
4237 None
4238 }
4239 })
4240 .collect();
4241 let extra_captures = extra_captures
4242 .into_iter()
4243 .map(|(range, name)| {
4244 (
4245 name.to_string(),
4246 self.text_for_range(range).collect::<String>(),
4247 )
4248 })
4249 .collect();
4250 // All tags should have the same range.
4251 Some(RunnableRange {
4252 run_range,
4253 full_range,
4254 runnable: Runnable {
4255 tags,
4256 language: mat.language,
4257 buffer: self.remote_id(),
4258 },
4259 extra_captures,
4260 buffer_id: self.remote_id(),
4261 })
4262 });
4263
4264 syntax_matches.advance();
4265 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns `None`. However, a match
                // without a run marker should not end the iterator, so in that case we loop
                // around and try the next match.
4268 return test_range;
4269 }
4270 }
4271 })
4272 }
4273
4274 /// Returns selections for remote peers intersecting the given range.
4275 #[allow(clippy::type_complexity)]
4276 pub fn selections_in_range(
4277 &self,
4278 range: Range<Anchor>,
4279 include_local: bool,
4280 ) -> impl Iterator<
4281 Item = (
4282 ReplicaId,
4283 bool,
4284 CursorShape,
4285 impl Iterator<Item = &Selection<Anchor>> + '_,
4286 ),
4287 > + '_ {
4288 self.remote_selections
4289 .iter()
4290 .filter(move |(replica_id, set)| {
4291 (include_local || **replica_id != self.text.replica_id())
4292 && !set.selections.is_empty()
4293 })
4294 .map(move |(replica_id, set)| {
4295 let start_ix = match set.selections.binary_search_by(|probe| {
4296 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4297 }) {
4298 Ok(ix) | Err(ix) => ix,
4299 };
4300 let end_ix = match set.selections.binary_search_by(|probe| {
4301 probe.start.cmp(&range.end, self).then(Ordering::Less)
4302 }) {
4303 Ok(ix) | Err(ix) => ix,
4304 };
4305
4306 (
4307 *replica_id,
4308 set.line_mode,
4309 set.cursor_shape,
4310 set.selections[start_ix..end_ix].iter(),
4311 )
4312 })
4313 }
4314
    /// Returns whether the buffer contains any diagnostics.
4316 pub fn has_diagnostics(&self) -> bool {
4317 !self.diagnostics.is_empty()
4318 }
4319
4320 /// Returns all the diagnostics intersecting the given range.
4321 pub fn diagnostics_in_range<'a, T, O>(
4322 &'a self,
4323 search_range: Range<T>,
4324 reversed: bool,
4325 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4326 where
4327 T: 'a + Clone + ToOffset,
4328 O: 'a + FromAnchor,
4329 {
4330 let mut iterators: Vec<_> = self
4331 .diagnostics
4332 .iter()
4333 .map(|(_, collection)| {
4334 collection
4335 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4336 .peekable()
4337 })
4338 .collect();
4339
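        // Merge the per-language-server iterators, always yielding the entry that sorts first
        // (or last, when `reversed`) across all of them.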
4340 std::iter::from_fn(move || {
4341 let (next_ix, _) = iterators
4342 .iter_mut()
4343 .enumerate()
                .filter_map(|(ix, iter)| Some((ix, iter.peek()?)))
4345 .min_by(|(_, a), (_, b)| {
4346 let cmp = a
4347 .range
4348 .start
4349 .cmp(&b.range.start, self)
4350 // when range is equal, sort by diagnostic severity
4351 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4352 // and stabilize order with group_id
4353 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4354 if reversed { cmp.reverse() } else { cmp }
4355 })?;
4356 iterators[next_ix]
4357 .next()
4358 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4359 diagnostic,
4360 range: FromAnchor::from_anchor(&range.start, self)
4361 ..FromAnchor::from_anchor(&range.end, self),
4362 })
4363 })
4364 }
4365
4366 /// Returns all the diagnostic groups associated with the given
4367 /// language server ID. If no language server ID is provided,
4368 /// all diagnostics groups are returned.
4369 pub fn diagnostic_groups(
4370 &self,
4371 language_server_id: Option<LanguageServerId>,
4372 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4373 let mut groups = Vec::new();
4374
4375 if let Some(language_server_id) = language_server_id {
4376 if let Ok(ix) = self
4377 .diagnostics
4378 .binary_search_by_key(&language_server_id, |e| e.0)
4379 {
4380 self.diagnostics[ix]
4381 .1
4382 .groups(language_server_id, &mut groups, self);
4383 }
4384 } else {
4385 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4386 diagnostics.groups(*language_server_id, &mut groups, self);
4387 }
4388 }
4389
4390 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4391 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4392 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4393 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4394 });
4395
4396 groups
4397 }
4398
4399 /// Returns an iterator over the diagnostics for the given group.
4400 pub fn diagnostic_group<O>(
4401 &self,
4402 group_id: usize,
4403 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4404 where
4405 O: FromAnchor + 'static,
4406 {
4407 self.diagnostics
4408 .iter()
4409 .flat_map(move |(_, set)| set.group(group_id, self))
4410 }
4411
4412 /// An integer version number that accounts for all updates besides
4413 /// the buffer's text itself (which is versioned via a version vector).
4414 pub fn non_text_state_update_count(&self) -> usize {
4415 self.non_text_state_update_count
4416 }
4417
4418 /// An integer version that changes when the buffer's syntax changes.
4419 pub fn syntax_update_count(&self) -> usize {
4420 self.syntax.update_count()
4421 }
4422
    /// Returns the underlying [`File`], if the buffer is associated with one.
4424 pub fn file(&self) -> Option<&Arc<dyn File>> {
4425 self.file.as_ref()
4426 }
4427
4428 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4429 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4430 if let Some(file) = self.file() {
4431 if file.path().file_name().is_none() || include_root {
4432 Some(file.full_path(cx))
4433 } else {
4434 Some(file.path().to_path_buf())
4435 }
4436 } else {
4437 None
4438 }
4439 }
4440
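    /// Returns the words in the given range, keyed by their text and mapped to their anchor
    /// ranges, optionally filtered by a fuzzy query string.
    ///
    /// A minimal sketch (assuming a `snapshot: BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```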
4441 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4442 let query_str = query.fuzzy_contents;
4443 if query_str.is_some_and(|query| query.is_empty()) {
4444 return BTreeMap::default();
4445 }
4446
4447 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4448 language,
4449 override_id: None,
4450 }));
4451
4452 let mut query_ix = 0;
4453 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4454 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4455
4456 let mut words = BTreeMap::default();
4457 let mut current_word_start_ix = None;
4458 let mut chunk_ix = query.range.start;
4459 for chunk in self.chunks(query.range, false) {
4460 for (i, c) in chunk.text.char_indices() {
4461 let ix = chunk_ix + i;
4462 if classifier.is_word(c) {
4463 if current_word_start_ix.is_none() {
4464 current_word_start_ix = Some(ix);
4465 }
4466
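                    // Advance through the fuzzy query when the current character matches the
                    // next query character, case-insensitively.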
4467 if let Some(query_chars) = &query_chars
4468 && query_ix < query_len
4469 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4470 {
4471 query_ix += 1;
4472 }
4473 continue;
4474 } else if let Some(word_start) = current_word_start_ix.take()
4475 && query_ix == query_len
4476 {
4477 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4478 let mut word_text = self.text_for_range(word_start..ix).peekable();
4479 let first_char = word_text
4480 .peek()
4481 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce noisy completions, optionally skip words that
                    // start with a digit.
4483 if !query.skip_digits
4484 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4485 {
4486 words.insert(word_text.collect(), word_range);
4487 }
4488 }
4489 query_ix = 0;
4490 }
4491 chunk_ix += chunk.text.len();
4492 }
4493
4494 words
4495 }
4496}
4497
4498pub struct WordsQuery<'a> {
    /// Only return words that contain all of this string's characters as a case-insensitive,
    /// in-order subsequence.
4500 pub fuzzy_contents: Option<&'a str>,
4501 /// Skips words that start with a digit.
4502 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4504 pub range: Range<usize>,
4505}
4506
4507fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4508 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4509}
4510
4511fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4512 let mut result = IndentSize::spaces(0);
4513 for c in text {
4514 let kind = match c {
4515 ' ' => IndentKind::Space,
4516 '\t' => IndentKind::Tab,
4517 _ => break,
4518 };
4519 if result.len == 0 {
4520 result.kind = kind;
4521 }
4522 result.len += 1;
4523 }
4524 result
4525}
4526
4527impl Clone for BufferSnapshot {
4528 fn clone(&self) -> Self {
4529 Self {
4530 text: self.text.clone(),
4531 syntax: self.syntax.clone(),
4532 file: self.file.clone(),
4533 remote_selections: self.remote_selections.clone(),
4534 diagnostics: self.diagnostics.clone(),
4535 language: self.language.clone(),
4536 non_text_state_update_count: self.non_text_state_update_count,
4537 }
4538 }
4539}
4540
4541impl Deref for BufferSnapshot {
4542 type Target = text::BufferSnapshot;
4543
4544 fn deref(&self) -> &Self::Target {
4545 &self.text
4546 }
4547}
4548
4549unsafe impl Send for BufferChunks<'_> {}
4550
4551impl<'a> BufferChunks<'a> {
4552 pub(crate) fn new(
4553 text: &'a Rope,
4554 range: Range<usize>,
4555 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4556 diagnostics: bool,
4557 buffer_snapshot: Option<&'a BufferSnapshot>,
4558 ) -> Self {
4559 let mut highlights = None;
4560 if let Some((captures, highlight_maps)) = syntax {
4561 highlights = Some(BufferChunkHighlights {
4562 captures,
4563 next_capture: None,
4564 stack: Default::default(),
4565 highlight_maps,
4566 })
4567 }
4568
4569 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4570 let chunks = text.chunks_in_range(range.clone());
4571
4572 let mut this = BufferChunks {
4573 range,
4574 buffer_snapshot,
4575 chunks,
4576 diagnostic_endpoints,
4577 error_depth: 0,
4578 warning_depth: 0,
4579 information_depth: 0,
4580 hint_depth: 0,
4581 unnecessary_depth: 0,
4582 underline: true,
4583 highlights,
4584 };
4585 this.initialize_diagnostic_endpoints();
4586 this
4587 }
4588
    /// Seeks to the given byte range in the buffer.
4590 pub fn seek(&mut self, range: Range<usize>) {
4591 let old_range = std::mem::replace(&mut self.range, range.clone());
4592 self.chunks.set_range(self.range.clone());
4593 if let Some(highlights) = self.highlights.as_mut() {
4594 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4595 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4596 highlights
4597 .stack
4598 .retain(|(end_offset, _)| *end_offset > range.start);
4599 if let Some(capture) = &highlights.next_capture
4600 && range.start >= capture.node.start_byte()
4601 {
4602 let next_capture_end = capture.node.end_byte();
4603 if range.start < next_capture_end {
4604 highlights.stack.push((
4605 next_capture_end,
4606 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4607 ));
4608 }
4609 highlights.next_capture.take();
4610 }
4611 } else if let Some(snapshot) = self.buffer_snapshot {
4612 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4613 *highlights = BufferChunkHighlights {
4614 captures,
4615 next_capture: None,
4616 stack: Default::default(),
4617 highlight_maps,
4618 };
4619 } else {
4620 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4621 // Seeking such BufferChunks is not supported.
4622 debug_assert!(
4623 false,
4624 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4625 );
4626 }
4627
4628 highlights.captures.set_byte_range(self.range.clone());
4629 self.initialize_diagnostic_endpoints();
4630 }
4631 }
4632
4633 fn initialize_diagnostic_endpoints(&mut self) {
4634 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4635 && let Some(buffer) = self.buffer_snapshot
4636 {
4637 let mut diagnostic_endpoints = Vec::new();
4638 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4639 diagnostic_endpoints.push(DiagnosticEndpoint {
4640 offset: entry.range.start,
4641 is_start: true,
4642 severity: entry.diagnostic.severity,
4643 is_unnecessary: entry.diagnostic.is_unnecessary,
4644 underline: entry.diagnostic.underline,
4645 });
4646 diagnostic_endpoints.push(DiagnosticEndpoint {
4647 offset: entry.range.end,
4648 is_start: false,
4649 severity: entry.diagnostic.severity,
4650 is_unnecessary: entry.diagnostic.is_unnecessary,
4651 underline: entry.diagnostic.underline,
4652 });
4653 }
4654 diagnostic_endpoints
4655 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4656 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4657 self.hint_depth = 0;
4658 self.error_depth = 0;
4659 self.warning_depth = 0;
4660 self.information_depth = 0;
4661 }
4662 }
4663
4664 /// The current byte offset in the buffer.
4665 pub fn offset(&self) -> usize {
4666 self.range.start
4667 }
4668
4669 pub fn range(&self) -> Range<usize> {
4670 self.range.clone()
4671 }
4672
4673 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4674 let depth = match endpoint.severity {
4675 DiagnosticSeverity::ERROR => &mut self.error_depth,
4676 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4677 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4678 DiagnosticSeverity::HINT => &mut self.hint_depth,
4679 _ => return,
4680 };
4681 if endpoint.is_start {
4682 *depth += 1;
4683 } else {
4684 *depth -= 1;
4685 }
4686
4687 if endpoint.is_unnecessary {
4688 if endpoint.is_start {
4689 self.unnecessary_depth += 1;
4690 } else {
4691 self.unnecessary_depth -= 1;
4692 }
4693 }
4694 }
4695
4696 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4697 if self.error_depth > 0 {
4698 Some(DiagnosticSeverity::ERROR)
4699 } else if self.warning_depth > 0 {
4700 Some(DiagnosticSeverity::WARNING)
4701 } else if self.information_depth > 0 {
4702 Some(DiagnosticSeverity::INFORMATION)
4703 } else if self.hint_depth > 0 {
4704 Some(DiagnosticSeverity::HINT)
4705 } else {
4706 None
4707 }
4708 }
4709
4710 fn current_code_is_unnecessary(&self) -> bool {
4711 self.unnecessary_depth > 0
4712 }
4713}
4714
4715impl<'a> Iterator for BufferChunks<'a> {
4716 type Item = Chunk<'a>;
4717
4718 fn next(&mut self) -> Option<Self::Item> {
4719 let mut next_capture_start = usize::MAX;
4720 let mut next_diagnostic_endpoint = usize::MAX;
4721
4722 if let Some(highlights) = self.highlights.as_mut() {
4723 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4724 if *parent_capture_end <= self.range.start {
4725 highlights.stack.pop();
4726 } else {
4727 break;
4728 }
4729 }
4730
4731 if highlights.next_capture.is_none() {
4732 highlights.next_capture = highlights.captures.next();
4733 }
4734
4735 while let Some(capture) = highlights.next_capture.as_ref() {
4736 if self.range.start < capture.node.start_byte() {
4737 next_capture_start = capture.node.start_byte();
4738 break;
4739 } else {
4740 let highlight_id =
4741 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4742 highlights
4743 .stack
4744 .push((capture.node.end_byte(), highlight_id));
4745 highlights.next_capture = highlights.captures.next();
4746 }
4747 }
4748 }
4749
4750 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4751 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4752 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4753 if endpoint.offset <= self.range.start {
4754 self.update_diagnostic_depths(endpoint);
4755 diagnostic_endpoints.next();
4756 self.underline = endpoint.underline;
4757 } else {
4758 next_diagnostic_endpoint = endpoint.offset;
4759 break;
4760 }
4761 }
4762 }
4763 self.diagnostic_endpoints = diagnostic_endpoints;
4764
4765 if let Some(chunk) = self.chunks.peek() {
4766 let chunk_start = self.range.start;
4767 let mut chunk_end = (self.chunks.offset() + chunk.len())
4768 .min(next_capture_start)
4769 .min(next_diagnostic_endpoint);
4770 let mut highlight_id = None;
4771 if let Some(highlights) = self.highlights.as_ref()
4772 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4773 {
4774 chunk_end = chunk_end.min(*parent_capture_end);
4775 highlight_id = Some(*parent_highlight_id);
4776 }
4777
4778 let slice =
4779 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4780 self.range.start = chunk_end;
4781 if self.range.start == self.chunks.offset() + chunk.len() {
4782 self.chunks.next().unwrap();
4783 }
4784
4785 Some(Chunk {
4786 text: slice,
4787 syntax_highlight_id: highlight_id,
4788 underline: self.underline,
4789 diagnostic_severity: self.current_diagnostic_severity(),
4790 is_unnecessary: self.current_code_is_unnecessary(),
4791 ..Chunk::default()
4792 })
4793 } else {
4794 None
4795 }
4796 }
4797}
4798
4799impl operation_queue::Operation for Operation {
4800 fn lamport_timestamp(&self) -> clock::Lamport {
4801 match self {
4802 Operation::Buffer(_) => {
4803 unreachable!("buffer operations should never be deferred at this layer")
4804 }
4805 Operation::UpdateDiagnostics {
4806 lamport_timestamp, ..
4807 }
4808 | Operation::UpdateSelections {
4809 lamport_timestamp, ..
4810 }
4811 | Operation::UpdateCompletionTriggers {
4812 lamport_timestamp, ..
4813 } => *lamport_timestamp,
4814 }
4815 }
4816}
4817
4818impl Default for Diagnostic {
4819 fn default() -> Self {
4820 Self {
4821 source: Default::default(),
4822 source_kind: DiagnosticSourceKind::Other,
4823 code: None,
4824 code_description: None,
4825 severity: DiagnosticSeverity::ERROR,
4826 message: Default::default(),
4827 markdown: None,
4828 group_id: 0,
4829 is_primary: false,
4830 is_disk_based: false,
4831 is_unnecessary: false,
4832 underline: true,
4833 data: None,
4834 }
4835 }
4836}
4837
4838impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4840 pub fn spaces(len: u32) -> Self {
4841 Self {
4842 len,
4843 kind: IndentKind::Space,
4844 }
4845 }
4846
4847 /// Returns an [`IndentSize`] representing a tab.
4848 pub fn tab() -> Self {
4849 Self {
4850 len: 1,
4851 kind: IndentKind::Tab,
4852 }
4853 }
4854
4855 /// An iterator over the characters represented by this [`IndentSize`].
4856 pub fn chars(&self) -> impl Iterator<Item = char> {
4857 iter::repeat(self.char()).take(self.len as usize)
4858 }
4859
4860 /// The character representation of this [`IndentSize`].
4861 pub fn char(&self) -> char {
4862 match self.kind {
4863 IndentKind::Space => ' ',
4864 IndentKind::Tab => '\t',
4865 }
4866 }
4867
4868 /// Consumes the current [`IndentSize`] and returns a new one that has
4869 /// been shrunk or enlarged by the given size along the given direction.
4870 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4871 match direction {
4872 Ordering::Less => {
4873 if self.kind == size.kind && self.len >= size.len {
4874 self.len -= size.len;
4875 }
4876 }
4877 Ordering::Equal => {}
4878 Ordering::Greater => {
4879 if self.len == 0 {
4880 self = size;
4881 } else if self.kind == size.kind {
4882 self.len += size.len;
4883 }
4884 }
4885 }
4886 self
4887 }
4888
4889 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4890 match self.kind {
4891 IndentKind::Space => self.len as usize,
4892 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4893 }
4894 }
4895}
4896
4897#[cfg(any(test, feature = "test-support"))]
4898pub struct TestFile {
4899 pub path: Arc<Path>,
4900 pub root_name: String,
4901 pub local_root: Option<PathBuf>,
4902}
4903
4904#[cfg(any(test, feature = "test-support"))]
4905impl File for TestFile {
4906 fn path(&self) -> &Arc<Path> {
4907 &self.path
4908 }
4909
4910 fn full_path(&self, _: &gpui::App) -> PathBuf {
4911 PathBuf::from(&self.root_name).join(self.path.as_ref())
4912 }
4913
4914 fn as_local(&self) -> Option<&dyn LocalFile> {
4915 if self.local_root.is_some() {
4916 Some(self)
4917 } else {
4918 None
4919 }
4920 }
4921
4922 fn disk_state(&self) -> DiskState {
4923 unimplemented!()
4924 }
4925
4926 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4927 self.path().file_name().unwrap_or(self.root_name.as_ref())
4928 }
4929
4930 fn worktree_id(&self, _: &App) -> WorktreeId {
4931 WorktreeId::from_usize(0)
4932 }
4933
4934 fn to_proto(&self, _: &App) -> rpc::proto::File {
4935 unimplemented!()
4936 }
4937
4938 fn is_private(&self) -> bool {
4939 false
4940 }
4941}
4942
4943#[cfg(any(test, feature = "test-support"))]
4944impl LocalFile for TestFile {
4945 fn abs_path(&self, _cx: &App) -> PathBuf {
4946 PathBuf::from(self.local_root.as_ref().unwrap())
4947 .join(&self.root_name)
4948 .join(self.path.as_ref())
4949 }
4950
4951 fn load(&self, _cx: &App) -> Task<Result<String>> {
4952 unimplemented!()
4953 }
4954
4955 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4956 unimplemented!()
4957 }
4958}
4959
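/// Groups an ascending sequence of values into contiguous ranges, starting a new range whenever
/// a value is not one greater than the previous value or the current range reaches `max_len`.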
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
4966 std::iter::from_fn(move || {
4967 loop {
4968 if let Some(value) = values.next() {
4969 if let Some(range) = &mut current_range
4970 && value == range.end
4971 && range.len() < max_len
4972 {
4973 range.end += 1;
4974 continue;
4975 }
4976
4977 let prev_range = current_range.clone();
4978 current_range = Some(value..(value + 1));
4979 if prev_range.is_some() {
4980 return prev_range;
4981 }
4982 } else {
4983 return current_range.take();
4984 }
4985 }
4986 })
4987}
4988
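/// Classifies characters as word, whitespace, or punctuation characters, taking language-specific
/// word characters into account when a [`LanguageScope`] is provided.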
4989#[derive(Default, Debug)]
4990pub struct CharClassifier {
4991 scope: Option<LanguageScope>,
4992 for_completion: bool,
4993 ignore_punctuation: bool,
4994}
4995
4996impl CharClassifier {
4997 pub fn new(scope: Option<LanguageScope>) -> Self {
4998 Self {
4999 scope,
5000 for_completion: false,
5001 ignore_punctuation: false,
5002 }
5003 }
5004
5005 pub fn for_completion(self, for_completion: bool) -> Self {
5006 Self {
5007 for_completion,
5008 ..self
5009 }
5010 }
5011
5012 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5013 Self {
5014 ignore_punctuation,
5015 ..self
5016 }
5017 }
5018
5019 pub fn is_whitespace(&self, c: char) -> bool {
5020 self.kind(c) == CharKind::Whitespace
5021 }
5022
5023 pub fn is_word(&self, c: char) -> bool {
5024 self.kind(c) == CharKind::Word
5025 }
5026
5027 pub fn is_punctuation(&self, c: char) -> bool {
5028 self.kind(c) == CharKind::Punctuation
5029 }
5030
5031 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5032 if c.is_alphanumeric() || c == '_' {
5033 return CharKind::Word;
5034 }
5035
5036 if let Some(scope) = &self.scope {
5037 let characters = if self.for_completion {
5038 scope.completion_query_characters()
5039 } else {
5040 scope.word_characters()
5041 };
5042 if let Some(characters) = characters
5043 && characters.contains(&c)
5044 {
5045 return CharKind::Word;
5046 }
5047 }
5048
5049 if c.is_whitespace() {
5050 return CharKind::Whitespace;
5051 }
5052
5053 if ignore_punctuation {
5054 CharKind::Word
5055 } else {
5056 CharKind::Punctuation
5057 }
5058 }
5059
5060 pub fn kind(&self, c: char) -> CharKind {
5061 self.kind_with(c, self.ignore_punctuation)
5062 }
5063}
5064
5065/// Find all of the ranges of whitespace that occur at the ends of lines
5066/// in the given rope.
5067///
5068/// This could also be done with a regex search, but this implementation
5069/// avoids copying text.
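///
/// A minimal sketch (assuming `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
/// // One range per line that ends in spaces or tabs.
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```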
5070pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5071 let mut ranges = Vec::new();
5072
5073 let mut offset = 0;
5074 let mut prev_chunk_trailing_whitespace_range = 0..0;
5075 for chunk in rope.chunks() {
5076 let mut prev_line_trailing_whitespace_range = 0..0;
5077 for (i, line) in chunk.split('\n').enumerate() {
5078 let line_end_offset = offset + line.len();
5079 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5080 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5081
5082 if i == 0 && trimmed_line_len == 0 {
5083 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5084 }
5085 if !prev_line_trailing_whitespace_range.is_empty() {
5086 ranges.push(prev_line_trailing_whitespace_range);
5087 }
5088
5089 offset = line_end_offset + 1;
5090 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5091 }
5092
5093 offset -= 1;
5094 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5095 }
5096
5097 if !prev_chunk_trailing_whitespace_range.is_empty() {
5098 ranges.push(prev_chunk_trailing_whitespace_range);
5099 }
5100
5101 ranges
5102}