1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
232 /// Allows quickly distinguishing diagnostic groups by their source.
233 pub source_kind: DiagnosticSourceKind,
234 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
306 /// The buffer needs to be reloaded.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
356 /// Return whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
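///
/// # Examples
///
/// An illustrative sketch, not part of the original docs; it only exercises the
/// two variants that need no [`MTime`] value.
///
/// ```ignore
/// assert_eq!(DiskState::New.mtime(), None);
/// assert!(!DiskState::Deleted.exists());
/// ```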
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
395 /// Returns the absolute path of this file
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
422 /// If the original indent column is `a`, and the first line of the
423 /// insertion is then auto-indented to column `b`, then every other line of
424 /// the insertion will have its indentation adjusted by `b - a`.
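///
/// # Example
///
/// A hypothetical sketch of pasting copied text with block auto-indentation;
/// `buffer`, `position`, `copied_text`, and `cx` are assumed to be in scope.
///
/// ```ignore
/// // The text was originally copied at indent column 4. Auto-indent shifts the
/// // whole block while preserving its internal relative indentation.
/// buffer.edit(
///     [(position..position, copied_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```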
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
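///
/// # Example
///
/// An illustrative sketch; `snapshot` is assumed to be a [`BufferSnapshot`], and
/// chunks are requested for the entire buffer with language-aware highlighting.
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(highlight_id) = chunk.syntax_highlight_id {
///         // Resolve `highlight_id` against the active theme here.
///     }
///     print!("{}", chunk.text);
/// }
/// ```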
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
497 /// Whether this chunk of text comes from an inlay.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
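///
/// # Example
///
/// A hedged sketch of how a [`Diff`] is typically produced and consumed, mirroring
/// what [`Buffer::reload`] does; `buffer`, `new_text`, and the async `cx` are assumed.
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// })?;
/// ```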
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region of the buffer that can be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
574 /// Returns the first line, with leading whitespace trimmed unless a highlight
575 /// begins within it, along with a boolean indicating whether more lines follow.
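///
/// # Example
///
/// An illustrative sketch, not part of the original docs.
///
/// ```ignore
/// let highlighted = HighlightedText {
///     text: "    let x = 1;\nlet y = 2;".into(),
///     highlights: Vec::new(),
/// };
/// let (preview, has_more) = highlighted.first_line_preview();
/// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
/// assert!(has_more);
/// ```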
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
727 &current_snapshot.text,
728 &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
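///
/// # Example
///
/// A minimal sketch; `cx` is assumed to be a `&mut App`.
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
/// ```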
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serializes all of the changes to the buffer since the given version as protobuf operations.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
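///
/// # Example
///
/// An illustrative sketch; `buffer` is assumed to be an `Entity<Buffer>` and `cx` an `&mut App`.
///
/// ```ignore
/// let snapshot = buffer.read(cx).snapshot();
/// cx.background_spawn(async move {
///     // The snapshot can be read freely off the main thread.
///     let last_row = snapshot.max_point().row;
/// })
/// .detach();
/// ```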
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
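///
/// # Example
///
/// A hypothetical sketch; `base` is assumed to be an `Entity<Buffer>`.
///
/// ```ignore
/// let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
/// branch.update(cx, |buffer, cx| {
///     buffer.edit([(0..0, "// header\n")], None, cx);
///     // An empty `ranges` vector applies every change to the base buffer.
///     buffer.merge_into_base(Vec::new(), cx);
/// });
/// ```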
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation
1162 && let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1188 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1189 {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193
1194 self.apply_ops([operation.clone()], cx);
1195
1196 if let Some(timestamp) = operation_to_undo {
1197 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1198 self.undo_operations(counts, cx);
1199 }
1200 }
1201
1202 #[cfg(test)]
1203 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1204 &self.text
1205 }
1206
1207 /// Retrieve a snapshot of the buffer's raw text, without any
1208 /// language-related state like the syntax tree or diagnostics.
1209 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1210 self.text.snapshot()
1211 }
1212
1213 /// The file associated with the buffer, if any.
1214 pub fn file(&self) -> Option<&Arc<dyn File>> {
1215 self.file.as_ref()
1216 }
1217
1218 /// The version of the buffer that was last saved or reloaded from disk.
1219 pub fn saved_version(&self) -> &clock::Global {
1220 &self.saved_version
1221 }
1222
1223 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1224 pub fn saved_mtime(&self) -> Option<MTime> {
1225 self.saved_mtime
1226 }
1227
1228 /// Assign a language to the buffer.
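///
/// # Example
///
/// A hedged sketch; `languages` is assumed to be an `Arc<LanguageRegistry>` whose
/// `language_for_name` lookup resolves to the desired language.
///
/// ```ignore
/// let rust = languages.language_for_name("Rust").await?;
/// buffer.update(cx, |buffer, cx| buffer.set_language(Some(rust), cx));
/// ```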
1229 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1230 self.non_text_state_update_count += 1;
1231 self.syntax_map.lock().clear(&self.text);
1232 self.language = language;
1233 self.was_changed();
1234 self.reparse(cx);
1235 cx.emit(BufferEvent::LanguageChanged);
1236 }
1237
1238 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1239 /// other languages if parts of the buffer are written in different languages.
1240 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1241 self.syntax_map
1242 .lock()
1243 .set_language_registry(language_registry);
1244 }
1245
1246 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1247 self.syntax_map.lock().language_registry()
1248 }
1249
1250 /// Assign the buffer a new [`Capability`].
1251 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1252 self.capability = capability;
1253 cx.emit(BufferEvent::CapabilityChanged)
1254 }
1255
1256 /// This method is called to signal that the buffer has been saved.
1257 pub fn did_save(
1258 &mut self,
1259 version: clock::Global,
1260 mtime: Option<MTime>,
1261 cx: &mut Context<Self>,
1262 ) {
1263 self.saved_version = version;
1264 self.has_unsaved_edits
1265 .set((self.saved_version().clone(), false));
1266 self.has_conflict = false;
1267 self.saved_mtime = mtime;
1268 self.was_changed();
1269 cx.emit(BufferEvent::Saved);
1270 cx.notify();
1271 }
1272
1273 /// This method is called to signal that the buffer has been discarded.
1274 pub fn discarded(&self, cx: &mut Context<Self>) {
1275 cx.emit(BufferEvent::Discarded);
1276 cx.notify();
1277 }
1278
1279 /// Reloads the contents of the buffer from disk.
1280 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1281 let (tx, rx) = futures::channel::oneshot::channel();
1282 let prev_version = self.text.version();
1283 self.reload_task = Some(cx.spawn(async move |this, cx| {
1284 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1285 let file = this.file.as_ref()?.as_local()?;
1286
1287 Some((file.disk_state().mtime(), file.load(cx)))
1288 })?
1289 else {
1290 return Ok(());
1291 };
1292
1293 let new_text = new_text.await?;
1294 let diff = this
1295 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1296 .await;
1297 this.update(cx, |this, cx| {
1298 if this.version() == diff.base_version {
1299 this.finalize_last_transaction();
1300 this.apply_diff(diff, cx);
1301 tx.send(this.finalize_last_transaction().cloned()).ok();
1302 this.has_conflict = false;
1303 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1304 } else {
1305 if !diff.edits.is_empty()
1306 || this
1307 .edits_since::<usize>(&diff.base_version)
1308 .next()
1309 .is_some()
1310 {
1311 this.has_conflict = true;
1312 }
1313
1314 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1315 }
1316
1317 this.reload_task.take();
1318 })
1319 }));
1320 rx
1321 }
1322
1323 /// This method is called to signal that the buffer has been reloaded.
1324 pub fn did_reload(
1325 &mut self,
1326 version: clock::Global,
1327 line_ending: LineEnding,
1328 mtime: Option<MTime>,
1329 cx: &mut Context<Self>,
1330 ) {
1331 self.saved_version = version;
1332 self.has_unsaved_edits
1333 .set((self.saved_version.clone(), false));
1334 self.text.set_line_ending(line_ending);
1335 self.saved_mtime = mtime;
1336 cx.emit(BufferEvent::Reloaded);
1337 cx.notify();
1338 }
1339
1340 /// Updates the [`File`] backing this buffer. This should be called when
1341 /// the file has changed or has been deleted.
1342 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1343 let was_dirty = self.is_dirty();
1344 let mut file_changed = false;
1345
1346 if let Some(old_file) = self.file.as_ref() {
1347 if new_file.path() != old_file.path() {
1348 file_changed = true;
1349 }
1350
1351 let old_state = old_file.disk_state();
1352 let new_state = new_file.disk_state();
1353 if old_state != new_state {
1354 file_changed = true;
1355 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1356 cx.emit(BufferEvent::ReloadNeeded)
1357 }
1358 }
1359 } else {
1360 file_changed = true;
1361 };
1362
1363 self.file = Some(new_file);
1364 if file_changed {
1365 self.was_changed();
1366 self.non_text_state_update_count += 1;
1367 if was_dirty != self.is_dirty() {
1368 cx.emit(BufferEvent::DirtyChanged);
1369 }
1370 cx.emit(BufferEvent::FileHandleChanged);
1371 cx.notify();
1372 }
1373 }
1374
1375 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1376 Some(self.branch_state.as_ref()?.base_buffer.clone())
1377 }
1378
1379 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1380 pub fn language(&self) -> Option<&Arc<Language>> {
1381 self.language.as_ref()
1382 }
1383
1384 /// Returns the [`Language`] at the given location.
1385 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1386 let offset = position.to_offset(self);
1387 let mut is_first = true;
1388 let start_anchor = self.anchor_before(offset);
1389 let end_anchor = self.anchor_after(offset);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .filter(|layer| {
1394 if is_first {
1395 is_first = false;
1396 return true;
1397 }
1398 let any_sub_ranges_contain_range = layer
1399 .included_sub_ranges
1400 .map(|sub_ranges| {
1401 sub_ranges.iter().any(|sub_range| {
1402 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1403 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1404 !is_before_start && !is_after_end
1405 })
1406 })
1407 .unwrap_or(true);
1408 let result = any_sub_ranges_contain_range;
1409 result
1410 })
1411 .last()
1412 .map(|info| info.language.clone())
1413 .or_else(|| self.language.clone())
1414 }
1415
1416 /// Returns each [`Language`] for the active syntax layers at the given location.
1417 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1418 let offset = position.to_offset(self);
1419 let mut languages: Vec<Arc<Language>> = self
1420 .syntax_map
1421 .lock()
1422 .layers_for_range(offset..offset, &self.text, false)
1423 .map(|info| info.language.clone())
1424 .collect();
1425
1426 if languages.is_empty()
1427 && let Some(buffer_language) = self.language()
1428 {
1429 languages.push(buffer_language.clone());
1430 }
1431
1432 languages
1433 }
1434
1435 /// An integer version number that accounts for all updates besides
1436 /// the buffer's text itself (which is versioned via a version vector).
1437 pub fn non_text_state_update_count(&self) -> usize {
1438 self.non_text_state_update_count
1439 }
1440
1441 /// Whether the buffer is being parsed in the background.
1442 #[cfg(any(test, feature = "test-support"))]
1443 pub fn is_parsing(&self) -> bool {
1444 self.reparse.is_some()
1445 }
1446
1447 /// Indicates whether the buffer contains any regions that may be
1448 /// written in a language that hasn't been loaded yet.
1449 pub fn contains_unknown_injections(&self) -> bool {
1450 self.syntax_map.lock().contains_unknown_injections()
1451 }
1452
1453 #[cfg(any(test, feature = "test-support"))]
1454 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1455 self.sync_parse_timeout = timeout;
1456 }
1457
1458 /// Called after an edit to synchronize the buffer's main parse tree with
1459 /// the buffer's new underlying state.
1460 ///
1461 /// Locks the syntax map and interpolates the edits since the last reparse
1462 /// into the foreground syntax tree.
1463 ///
1464 /// Then takes a stable snapshot of the syntax map before unlocking it.
1465 /// The snapshot with the interpolated edits is sent to a background thread,
1466 /// where we ask Tree-sitter to perform an incremental parse.
1467 ///
1468 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1469 /// waiting on the parse to complete. As soon as it completes, we proceed
1470 /// synchronously, unless a 1ms timeout elapses.
1471 ///
1472 /// If we time out waiting on the parse, we spawn a second task that waits
1473 /// for the parse to complete, and return with the interpolated tree still
1474 /// in the foreground. When the background parse completes, that task calls
1475 /// back into the main thread and assigns the newly parsed syntax state.
1476 ///
1477 /// If the buffer or grammar changed since the start of the background parse,
1478 /// initiate an additional reparse recursively. To avoid concurrent parses
1479 /// for the same buffer, we only initiate a new parse if we are not already
1480 /// parsing in the background.
1481 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1482 if self.reparse.is_some() {
1483 return;
1484 }
1485 let language = if let Some(language) = self.language.clone() {
1486 language
1487 } else {
1488 return;
1489 };
1490
1491 let text = self.text_snapshot();
1492 let parsed_version = self.version();
1493
1494 let mut syntax_map = self.syntax_map.lock();
1495 syntax_map.interpolate(&text);
1496 let language_registry = syntax_map.language_registry();
1497 let mut syntax_snapshot = syntax_map.snapshot();
1498 drop(syntax_map);
1499
1500 let parse_task = cx.background_spawn({
1501 let language = language.clone();
1502 let language_registry = language_registry.clone();
1503 async move {
1504 syntax_snapshot.reparse(&text, language_registry, language);
1505 syntax_snapshot
1506 }
1507 });
1508
1509 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1510 match cx
1511 .background_executor()
1512 .block_with_timeout(self.sync_parse_timeout, parse_task)
1513 {
1514 Ok(new_syntax_snapshot) => {
1515 self.did_finish_parsing(new_syntax_snapshot, cx);
1516 self.reparse = None;
1517 }
1518 Err(parse_task) => {
1519 self.reparse = Some(cx.spawn(async move |this, cx| {
1520 let new_syntax_map = parse_task.await;
1521 this.update(cx, move |this, cx| {
1522 let grammar_changed =
1523 this.language.as_ref().is_none_or(|current_language| {
1524 !Arc::ptr_eq(&language, current_language)
1525 });
1526 let language_registry_changed = new_syntax_map
1527 .contains_unknown_injections()
1528 && language_registry.is_some_and(|registry| {
1529 registry.version() != new_syntax_map.language_registry_version()
1530 });
1531 let parse_again = language_registry_changed
1532 || grammar_changed
1533 || this.version.changed_since(&parsed_version);
1534 this.did_finish_parsing(new_syntax_map, cx);
1535 this.reparse = None;
1536 if parse_again {
1537 this.reparse(cx);
1538 }
1539 })
1540 .ok();
1541 }));
1542 }
1543 }
1544 }
1545
1546 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1547 self.was_changed();
1548 self.non_text_state_update_count += 1;
1549 self.syntax_map.lock().did_parse(syntax_snapshot);
1550 self.request_autoindent(cx);
1551 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1552 cx.emit(BufferEvent::Reparsed);
1553 cx.notify();
1554 }
1555
1556 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1557 self.parse_status.1.clone()
1558 }
1559
1560 /// Assign to the buffer a set of diagnostics created by a given language server.
1561 pub fn update_diagnostics(
1562 &mut self,
1563 server_id: LanguageServerId,
1564 diagnostics: DiagnosticSet,
1565 cx: &mut Context<Self>,
1566 ) {
1567 let lamport_timestamp = self.text.lamport_clock.tick();
1568 let op = Operation::UpdateDiagnostics {
1569 server_id,
1570 diagnostics: diagnostics.iter().cloned().collect(),
1571 lamport_timestamp,
1572 };
1573
1574 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1575 self.send_operation(op, true, cx);
1576 }
1577
1578 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1579 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1580 return None;
1581 };
1582 Some(&self.diagnostics[idx].1)
1583 }
1584
1585 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1586 if let Some(indent_sizes) = self.compute_autoindents() {
1587 let indent_sizes = cx.background_spawn(indent_sizes);
1588 match cx
1589 .background_executor()
1590 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1591 {
1592 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1593 Err(indent_sizes) => {
1594 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1595 let indent_sizes = indent_sizes.await;
1596 this.update(cx, |this, cx| {
1597 this.apply_autoindents(indent_sizes, cx);
1598 })
1599 .ok();
1600 }));
1601 }
1602 }
1603 } else {
1604 self.autoindent_requests.clear();
1605 for tx in self.wait_for_autoindent_txs.drain(..) {
1606 tx.send(()).ok();
1607 }
1608 }
1609 }
1610
1611 fn compute_autoindents(
1612 &self,
1613 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1614 let max_rows_between_yields = 100;
1615 let snapshot = self.snapshot();
1616 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1617 return None;
1618 }
1619
1620 let autoindent_requests = self.autoindent_requests.clone();
1621 Some(async move {
1622 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1623 for request in autoindent_requests {
1624 // Resolve each edited range to its row in the current buffer and in the
1625 // buffer before this batch of edits.
1626 let mut row_ranges = Vec::new();
1627 let mut old_to_new_rows = BTreeMap::new();
1628 let mut language_indent_sizes_by_new_row = Vec::new();
1629 for entry in &request.entries {
1630 let position = entry.range.start;
1631 let new_row = position.to_point(&snapshot).row;
1632 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1633 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1634
1635 if !entry.first_line_is_new {
1636 let old_row = position.to_point(&request.before_edit).row;
1637 old_to_new_rows.insert(old_row, new_row);
1638 }
1639 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1640 }
1641
1642 // Build a map containing the suggested indentation for each of the edited lines
1643 // with respect to the state of the buffer before these edits. This map is keyed
1644 // by the rows for these lines in the current state of the buffer.
1645 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1646 let old_edited_ranges =
1647 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1648 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1649 let mut language_indent_size = IndentSize::default();
1650 for old_edited_range in old_edited_ranges {
1651 let suggestions = request
1652 .before_edit
1653 .suggest_autoindents(old_edited_range.clone())
1654 .into_iter()
1655 .flatten();
1656 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1657 if let Some(suggestion) = suggestion {
1658 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1659
1660 // Find the indent size based on the language for this row.
1661 while let Some((row, size)) = language_indent_sizes.peek() {
1662 if *row > new_row {
1663 break;
1664 }
1665 language_indent_size = *size;
1666 language_indent_sizes.next();
1667 }
1668
1669 let suggested_indent = old_to_new_rows
1670 .get(&suggestion.basis_row)
1671 .and_then(|from_row| {
1672 Some(old_suggestions.get(from_row).copied()?.0)
1673 })
1674 .unwrap_or_else(|| {
1675 request
1676 .before_edit
1677 .indent_size_for_line(suggestion.basis_row)
1678 })
1679 .with_delta(suggestion.delta, language_indent_size);
1680 old_suggestions
1681 .insert(new_row, (suggested_indent, suggestion.within_error));
1682 }
1683 }
1684 yield_now().await;
1685 }
1686
1687 // Compute new suggestions for each line, but only include them in the result
1688 // if they differ from the old suggestion for that line.
1689 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1690 let mut language_indent_size = IndentSize::default();
1691 for (row_range, original_indent_column) in row_ranges {
1692 let new_edited_row_range = if request.is_block_mode {
1693 row_range.start..row_range.start + 1
1694 } else {
1695 row_range.clone()
1696 };
1697
1698 let suggestions = snapshot
1699 .suggest_autoindents(new_edited_row_range.clone())
1700 .into_iter()
1701 .flatten();
1702 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1703 if let Some(suggestion) = suggestion {
1704 // Find the indent size based on the language for this row.
1705 while let Some((row, size)) = language_indent_sizes.peek() {
1706 if *row > new_row {
1707 break;
1708 }
1709 language_indent_size = *size;
1710 language_indent_sizes.next();
1711 }
1712
1713 let suggested_indent = indent_sizes
1714 .get(&suggestion.basis_row)
1715 .copied()
1716 .map(|e| e.0)
1717 .unwrap_or_else(|| {
1718 snapshot.indent_size_for_line(suggestion.basis_row)
1719 })
1720 .with_delta(suggestion.delta, language_indent_size);
1721
1722 if old_suggestions.get(&new_row).is_none_or(
1723 |(old_indentation, was_within_error)| {
1724 suggested_indent != *old_indentation
1725 && (!suggestion.within_error || *was_within_error)
1726 },
1727 ) {
1728 indent_sizes.insert(
1729 new_row,
1730 (suggested_indent, request.ignore_empty_lines),
1731 );
1732 }
1733 }
1734 }
1735
1736 if let (true, Some(original_indent_column)) =
1737 (request.is_block_mode, original_indent_column)
1738 {
1739 let new_indent =
1740 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1741 *indent
1742 } else {
1743 snapshot.indent_size_for_line(row_range.start)
1744 };
1745 let delta = new_indent.len as i64 - original_indent_column as i64;
1746 if delta != 0 {
1747 for row in row_range.skip(1) {
1748 indent_sizes.entry(row).or_insert_with(|| {
1749 let mut size = snapshot.indent_size_for_line(row);
1750 if size.kind == new_indent.kind {
1751 match delta.cmp(&0) {
1752 Ordering::Greater => size.len += delta as u32,
1753 Ordering::Less => {
1754 size.len = size.len.saturating_sub(-delta as u32)
1755 }
1756 Ordering::Equal => {}
1757 }
1758 }
1759 (size, request.ignore_empty_lines)
1760 });
1761 }
1762 }
1763 }
1764
1765 yield_now().await;
1766 }
1767 }
1768
1769 indent_sizes
1770 .into_iter()
1771 .filter_map(|(row, (indent, ignore_empty_lines))| {
1772 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1773 None
1774 } else {
1775 Some((row, indent))
1776 }
1777 })
1778 .collect()
1779 })
1780 }
1781
1782 fn apply_autoindents(
1783 &mut self,
1784 indent_sizes: BTreeMap<u32, IndentSize>,
1785 cx: &mut Context<Self>,
1786 ) {
1787 self.autoindent_requests.clear();
1788 for tx in self.wait_for_autoindent_txs.drain(..) {
1789 tx.send(()).ok();
1790 }
1791
1792 let edits: Vec<_> = indent_sizes
1793 .into_iter()
1794 .filter_map(|(row, indent_size)| {
1795 let current_size = indent_size_for_line(self, row);
1796 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1797 })
1798 .collect();
1799
1800 let preserve_preview = self.preserve_preview();
1801 self.edit(edits, None, cx);
1802 if preserve_preview {
1803 self.refresh_preview();
1804 }
1805 }
1806
1807 /// Create a minimal edit that will cause the given row to be indented
1808 /// with the given size. After applying this edit, the length of the line
1809 /// will always be at least `new_size.len`.
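///
/// # Example
///
/// An illustrative sketch (marked `ignore`, so it is not compiled as a doctest); it
/// assumes `IndentSize::spaces` and `Point::new` as used elsewhere in this file:
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of the row.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     2,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(edit, Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string())));
/// ```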
1810 pub fn edit_for_indent_size_adjustment(
1811 row: u32,
1812 current_size: IndentSize,
1813 new_size: IndentSize,
1814 ) -> Option<(Range<Point>, String)> {
1815 if new_size.kind == current_size.kind {
1816 match new_size.len.cmp(&current_size.len) {
1817 Ordering::Greater => {
1818 let point = Point::new(row, 0);
1819 Some((
1820 point..point,
1821 iter::repeat(new_size.char())
1822 .take((new_size.len - current_size.len) as usize)
1823 .collect::<String>(),
1824 ))
1825 }
1826
1827 Ordering::Less => Some((
1828 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1829 String::new(),
1830 )),
1831
1832 Ordering::Equal => None,
1833 }
1834 } else {
1835 Some((
1836 Point::new(row, 0)..Point::new(row, current_size.len),
1837 iter::repeat(new_size.char())
1838 .take(new_size.len as usize)
1839 .collect::<String>(),
1840 ))
1841 }
1842 }
1843
1844 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1845 /// and the given new text.
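///
/// A hedged usage sketch (the surrounding entity/update plumbing is illustrative,
/// not prescribed by this method):
///
/// ```ignore
/// // Compute the diff on the background executor, then apply it to the buffer.
/// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
/// let diff = diff_task.await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```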
1846 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1847 let old_text = self.as_rope().clone();
1848 let base_version = self.version();
1849 cx.background_executor()
1850 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1851 let old_text = old_text.to_string();
1852 let line_ending = LineEnding::detect(&new_text);
1853 LineEnding::normalize(&mut new_text);
1854 let edits = text_diff(&old_text, &new_text);
1855 Diff {
1856 base_version,
1857 line_ending,
1858 edits,
1859 }
1860 })
1861 }
1862
1863 /// Spawns a background task that searches the buffer for any whitespace
1864 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1865 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1866 let old_text = self.as_rope().clone();
1867 let line_ending = self.line_ending();
1868 let base_version = self.version();
1869 cx.background_spawn(async move {
1870 let ranges = trailing_whitespace_ranges(&old_text);
1871 let empty = Arc::<str>::from("");
1872 Diff {
1873 base_version,
1874 line_ending,
1875 edits: ranges
1876 .into_iter()
1877 .map(|range| (range, empty.clone()))
1878 .collect(),
1879 }
1880 })
1881 }
1882
1883 /// Ensures that the buffer ends with a single newline character, and
1884 /// no other whitespace. Does nothing if the buffer is empty.
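///
/// A minimal sketch (not compiled):
///
/// ```ignore
/// // Trailing whitespace after the last non-whitespace character is replaced
/// // with a single "\n".
/// buffer.ensure_final_newline(cx);
/// assert!(buffer.text().ends_with('\n'));
/// ```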
1885 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1886 let len = self.len();
1887 if len == 0 {
1888 return;
1889 }
1890 let mut offset = len;
1891 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1892 let non_whitespace_len = chunk
1893 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1894 .len();
1895 offset -= chunk.len();
1896 offset += non_whitespace_len;
1897 if non_whitespace_len != 0 {
1898 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1899 return;
1900 }
1901 break;
1902 }
1903 }
1904 self.edit([(offset..len, "\n")], None, cx);
1905 }
1906
1907 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1908 /// calculated, then adjust the diff to account for those changes, and discard any
1909 /// parts of the diff that conflict with those changes.
1910 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1911 let snapshot = self.snapshot();
1912 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1913 let mut delta = 0;
1914 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1915 while let Some(edit_since) = edits_since.peek() {
1916 // If the edit occurs after a diff hunk, then it does not
1917 // affect that hunk.
1918 if edit_since.old.start > range.end {
1919 break;
1920 }
1921 // If the edit precedes the diff hunk, then adjust the hunk
1922 // to reflect the edit.
1923 else if edit_since.old.end < range.start {
1924 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1925 edits_since.next();
1926 }
1927 // If the edit intersects a diff hunk, then discard that hunk.
1928 else {
1929 return None;
1930 }
1931 }
1932
1933 let start = (range.start as i64 + delta) as usize;
1934 let end = (range.end as i64 + delta) as usize;
1935 Some((start..end, new_text))
1936 });
1937
1938 self.start_transaction();
1939 self.text.set_line_ending(diff.line_ending);
1940 self.edit(adjusted_edits, None, cx);
1941 self.end_transaction(cx)
1942 }
1943
1944 fn has_unsaved_edits(&self) -> bool {
1945 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1946
1947 if last_version == self.version {
1948 self.has_unsaved_edits
1949 .set((last_version, has_unsaved_edits));
1950 return has_unsaved_edits;
1951 }
1952
1953 let has_edits = self.has_edits_since(&self.saved_version);
1954 self.has_unsaved_edits
1955 .set((self.version.clone(), has_edits));
1956 has_edits
1957 }
1958
1959 /// Checks if the buffer has unsaved changes.
1960 pub fn is_dirty(&self) -> bool {
1961 if self.capability == Capability::ReadOnly {
1962 return false;
1963 }
1964 if self.has_conflict {
1965 return true;
1966 }
1967 match self.file.as_ref().map(|f| f.disk_state()) {
1968 Some(DiskState::New) | Some(DiskState::Deleted) => {
1969 !self.is_empty() && self.has_unsaved_edits()
1970 }
1971 _ => self.has_unsaved_edits(),
1972 }
1973 }
1974
1975 /// Checks if the buffer and its file have both changed since the buffer
1976 /// was last saved or reloaded.
1977 pub fn has_conflict(&self) -> bool {
1978 if self.has_conflict {
1979 return true;
1980 }
1981 let Some(file) = self.file.as_ref() else {
1982 return false;
1983 };
1984 match file.disk_state() {
1985 DiskState::New => false,
1986 DiskState::Present { mtime } => match self.saved_mtime {
1987 Some(saved_mtime) => {
1988 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1989 }
1990 None => true,
1991 },
1992 DiskState::Deleted => false,
1993 }
1994 }
1995
1996 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1997 pub fn subscribe(&mut self) -> Subscription {
1998 self.text.subscribe()
1999 }
2000
2001 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2002 ///
2003 /// This allows downstream code to check if the buffer's text has changed without
2004 /// waiting for an effect cycle, which would be required if using events.
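///
/// A hedged sketch (assumes `std::rc::Rc` and `std::cell::Cell` imports):
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// buffer.edit([(0..0, "x")], None, cx);
/// // The bit is flipped synchronously as part of the edit.
/// assert!(changed.get());
/// ```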
2005 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2006 if let Err(ix) = self
2007 .change_bits
2008 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2009 {
2010 self.change_bits.insert(ix, bit);
2011 }
2012 }
2013
2014 fn was_changed(&mut self) {
2015 self.change_bits.retain(|change_bit| {
2016 change_bit.upgrade().is_some_and(|bit| {
2017 bit.replace(true);
2018 true
2019 })
2020 });
2021 }
2022
2023 /// Starts a transaction, if one is not already in-progress. When undoing or
2024 /// redoing edits, all of the edits performed within a transaction are undone
2025 /// or redone together.
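///
/// A minimal sketch of grouping two edits into one undo step:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "a")], None, cx);
/// buffer.edit([(1..1, "b")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // reverts both edits together
/// ```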
2026 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2027 self.start_transaction_at(Instant::now())
2028 }
2029
2030 /// Starts a transaction, providing the current time. Subsequent transactions
2031 /// that occur within a short period of time will be grouped together. This
2032 /// is controlled by the buffer's undo grouping duration.
2033 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2034 self.transaction_depth += 1;
2035 if self.was_dirty_before_starting_transaction.is_none() {
2036 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2037 }
2038 self.text.start_transaction_at(now)
2039 }
2040
2041 /// Terminates the current transaction, if this is the outermost transaction.
2042 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2043 self.end_transaction_at(Instant::now(), cx)
2044 }
2045
2046 /// Terminates the current transaction, providing the current time. Subsequent transactions
2047 /// that occur within a short period of time will be grouped together. This
2048 /// is controlled by the buffer's undo grouping duration.
2049 pub fn end_transaction_at(
2050 &mut self,
2051 now: Instant,
2052 cx: &mut Context<Self>,
2053 ) -> Option<TransactionId> {
2054 assert!(self.transaction_depth > 0);
2055 self.transaction_depth -= 1;
2056 let was_dirty = if self.transaction_depth == 0 {
2057 self.was_dirty_before_starting_transaction.take().unwrap()
2058 } else {
2059 false
2060 };
2061 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2062 self.did_edit(&start_version, was_dirty, cx);
2063 Some(transaction_id)
2064 } else {
2065 None
2066 }
2067 }
2068
2069 /// Manually add a transaction to the buffer's undo history.
2070 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2071 self.text.push_transaction(transaction, now);
2072 }
2073
2074 /// Differs from `push_transaction` in that it does not clear the redo
2075 /// stack. Intended to be used to create a parent transaction to merge
2076 /// potential child transactions into.
2077 ///
2078 /// The caller is responsible for removing it from the undo history using
2079 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2080 /// are merged into this transaction, the caller is responsible for ensuring
2081 /// the redo stack is cleared. The easiest way to ensure this is to create
2082 /// transactions with the usual `start_transaction` and `end_transaction`
2083 /// methods and merge the resulting transactions into the transaction
2084 /// created by this method.
2085 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2086 self.text.push_empty_transaction(now)
2087 }
2088
2089 /// Prevent the last transaction from being grouped with any subsequent transactions,
2090 /// even if they occur within the buffer's undo grouping duration.
2091 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2092 self.text.finalize_last_transaction()
2093 }
2094
2095 /// Manually group all changes since a given transaction.
2096 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2097 self.text.group_until_transaction(transaction_id);
2098 }
2099
2100 /// Manually remove a transaction from the buffer's undo history.
2101 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2102 self.text.forget_transaction(transaction_id)
2103 }
2104
2105 /// Retrieve a transaction from the buffer's undo history.
2106 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2107 self.text.get_transaction(transaction_id)
2108 }
2109
2110 /// Manually merge two transactions in the buffer's undo history.
2111 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2112 self.text.merge_transactions(transaction, destination);
2113 }
2114
2115 /// Waits for the buffer to receive operations with the given timestamps.
2116 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2117 &mut self,
2118 edit_ids: It,
2119 ) -> impl Future<Output = Result<()>> + use<It> {
2120 self.text.wait_for_edits(edit_ids)
2121 }
2122
2123 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2124 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2125 &mut self,
2126 anchors: It,
2127 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2128 self.text.wait_for_anchors(anchors)
2129 }
2130
2131 /// Waits for the buffer to receive operations up to the given version.
2132 pub fn wait_for_version(
2133 &mut self,
2134 version: clock::Global,
2135 ) -> impl Future<Output = Result<()>> + use<> {
2136 self.text.wait_for_version(version)
2137 }
2138
2139 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2140 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2141 pub fn give_up_waiting(&mut self) {
2142 self.text.give_up_waiting();
2143 }
2144
2145 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2146 let mut rx = None;
2147 if !self.autoindent_requests.is_empty() {
2148 let channel = oneshot::channel();
2149 self.wait_for_autoindent_txs.push(channel.0);
2150 rx = Some(channel.1);
2151 }
2152 rx
2153 }
2154
2155 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2156 pub fn set_active_selections(
2157 &mut self,
2158 selections: Arc<[Selection<Anchor>]>,
2159 line_mode: bool,
2160 cursor_shape: CursorShape,
2161 cx: &mut Context<Self>,
2162 ) {
2163 let lamport_timestamp = self.text.lamport_clock.tick();
2164 self.remote_selections.insert(
2165 self.text.replica_id(),
2166 SelectionSet {
2167 selections: selections.clone(),
2168 lamport_timestamp,
2169 line_mode,
2170 cursor_shape,
2171 },
2172 );
2173 self.send_operation(
2174 Operation::UpdateSelections {
2175 selections,
2176 line_mode,
2177 lamport_timestamp,
2178 cursor_shape,
2179 },
2180 true,
2181 cx,
2182 );
2183 self.non_text_state_update_count += 1;
2184 cx.notify();
2185 }
2186
2187 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2188 /// this replica.
2189 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2190 if self
2191 .remote_selections
2192 .get(&self.text.replica_id())
2193 .is_none_or(|set| !set.selections.is_empty())
2194 {
2195 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2196 }
2197 }
2198
2199 pub fn set_agent_selections(
2200 &mut self,
2201 selections: Arc<[Selection<Anchor>]>,
2202 line_mode: bool,
2203 cursor_shape: CursorShape,
2204 cx: &mut Context<Self>,
2205 ) {
2206 let lamport_timestamp = self.text.lamport_clock.tick();
2207 self.remote_selections.insert(
2208 AGENT_REPLICA_ID,
2209 SelectionSet {
2210 selections: selections.clone(),
2211 lamport_timestamp,
2212 line_mode,
2213 cursor_shape,
2214 },
2215 );
2216 self.non_text_state_update_count += 1;
2217 cx.notify();
2218 }
2219
2220 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2221 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2222 }
2223
2224 /// Replaces the buffer's entire text.
2225 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2226 where
2227 T: Into<Arc<str>>,
2228 {
2229 self.autoindent_requests.clear();
2230 self.edit([(0..self.len(), text)], None, cx)
2231 }
2232
2233 /// Appends the given text to the end of the buffer.
2234 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2235 where
2236 T: Into<Arc<str>>,
2237 {
2238 self.edit([(self.len()..self.len(), text)], None, cx)
2239 }
2240
2241 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2242 /// delete, and a string of text to insert at that location.
2243 ///
2244 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2245 /// request for the edited ranges, which will be processed when the buffer finishes
2246 /// parsing.
2247 ///
2248 /// Parsing takes place at the end of a transaction, and may run synchronously
2249 /// or asynchronously, depending on the changes.
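///
/// A hedged sketch of typical calls (offsets are hypothetical):
///
/// ```ignore
/// // Replace the first byte and insert text at offset 4, without auto-indent.
/// buffer.edit([(0..1, "X"), (4..4, "hello")], None, cx);
/// // Insert a newline and let each affected line be re-indented.
/// buffer.edit([(2..2, "\n")], Some(AutoindentMode::EachLine), cx);
/// ```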
2250 pub fn edit<I, S, T>(
2251 &mut self,
2252 edits_iter: I,
2253 autoindent_mode: Option<AutoindentMode>,
2254 cx: &mut Context<Self>,
2255 ) -> Option<clock::Lamport>
2256 where
2257 I: IntoIterator<Item = (Range<S>, T)>,
2258 S: ToOffset,
2259 T: Into<Arc<str>>,
2260 {
2261 // Skip invalid edits and coalesce contiguous ones.
2262 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2263
2264 for (range, new_text) in edits_iter {
2265 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2266
2267 if range.start > range.end {
2268 mem::swap(&mut range.start, &mut range.end);
2269 }
2270 let new_text = new_text.into();
2271 if !new_text.is_empty() || !range.is_empty() {
2272 if let Some((prev_range, prev_text)) = edits.last_mut()
2273 && prev_range.end >= range.start
2274 {
2275 prev_range.end = cmp::max(prev_range.end, range.end);
2276 *prev_text = format!("{prev_text}{new_text}").into();
2277 } else {
2278 edits.push((range, new_text));
2279 }
2280 }
2281 }
2282 if edits.is_empty() {
2283 return None;
2284 }
2285
2286 self.start_transaction();
2287 self.pending_autoindent.take();
2288 let autoindent_request = autoindent_mode
2289 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2290
2291 let edit_operation = self.text.edit(edits.iter().cloned());
2292 let edit_id = edit_operation.timestamp();
2293
2294 if let Some((before_edit, mode)) = autoindent_request {
2295 let mut delta = 0isize;
2296 let mut previous_setting = None;
2297 let entries: Vec<_> = edits
2298 .into_iter()
2299 .enumerate()
2300 .zip(&edit_operation.as_edit().unwrap().new_text)
2301 .filter(|((_, (range, _)), _)| {
2302 let language = before_edit.language_at(range.start);
2303 let language_id = language.map(|l| l.id());
2304 if let Some((cached_language_id, auto_indent)) = previous_setting
2305 && cached_language_id == language_id
2306 {
2307 auto_indent
2308 } else {
2309 // The auto-indent setting is not present in editorconfigs, hence
2310 // we can avoid passing the file here.
2311 let auto_indent =
2312 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2313 previous_setting = Some((language_id, auto_indent));
2314 auto_indent
2315 }
2316 })
2317 .map(|((ix, (range, _)), new_text)| {
2318 let new_text_length = new_text.len();
2319 let old_start = range.start.to_point(&before_edit);
2320 let new_start = (delta + range.start as isize) as usize;
2321 let range_len = range.end - range.start;
2322 delta += new_text_length as isize - range_len as isize;
2323
2324 // Decide what range of the insertion to auto-indent, and whether
2325 // the first line of the insertion should be considered a newly-inserted line
2326 // or an edit to an existing line.
2327 let mut range_of_insertion_to_indent = 0..new_text_length;
2328 let mut first_line_is_new = true;
2329
2330 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2331 let old_line_end = before_edit.line_len(old_start.row);
2332
2333 if old_start.column > old_line_start {
2334 first_line_is_new = false;
2335 }
2336
2337 if !new_text.contains('\n')
2338 && (old_start.column + (range_len as u32) < old_line_end
2339 || old_line_end == old_line_start)
2340 {
2341 first_line_is_new = false;
2342 }
2343
2344 // When inserting text starting with a newline, avoid auto-indenting the
2345 // previous line.
2346 if new_text.starts_with('\n') {
2347 range_of_insertion_to_indent.start += 1;
2348 first_line_is_new = true;
2349 }
2350
2351 let mut original_indent_column = None;
2352 if let AutoindentMode::Block {
2353 original_indent_columns,
2354 } = &mode
2355 {
2356 original_indent_column = Some(if new_text.starts_with('\n') {
2357 indent_size_for_text(
2358 new_text[range_of_insertion_to_indent.clone()].chars(),
2359 )
2360 .len
2361 } else {
2362 original_indent_columns
2363 .get(ix)
2364 .copied()
2365 .flatten()
2366 .unwrap_or_else(|| {
2367 indent_size_for_text(
2368 new_text[range_of_insertion_to_indent.clone()].chars(),
2369 )
2370 .len
2371 })
2372 });
2373
2374 // Avoid auto-indenting the line after the edit.
2375 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2376 range_of_insertion_to_indent.end -= 1;
2377 }
2378 }
2379
2380 AutoindentRequestEntry {
2381 first_line_is_new,
2382 original_indent_column,
2383 indent_size: before_edit.language_indent_size_at(range.start, cx),
2384 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2385 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2386 }
2387 })
2388 .collect();
2389
2390 if !entries.is_empty() {
2391 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2392 before_edit,
2393 entries,
2394 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2395 ignore_empty_lines: false,
2396 }));
2397 }
2398 }
2399
2400 self.end_transaction(cx);
2401 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2402 Some(edit_id)
2403 }
2404
2405 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2406 self.was_changed();
2407
2408 if self.edits_since::<usize>(old_version).next().is_none() {
2409 return;
2410 }
2411
2412 self.reparse(cx);
2413 cx.emit(BufferEvent::Edited);
2414 if was_dirty != self.is_dirty() {
2415 cx.emit(BufferEvent::DirtyChanged);
2416 }
2417 cx.notify();
2418 }
2419
2420 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2421 where
2422 I: IntoIterator<Item = Range<T>>,
2423 T: ToOffset + Copy,
2424 {
2425 let before_edit = self.snapshot();
2426 let entries = ranges
2427 .into_iter()
2428 .map(|range| AutoindentRequestEntry {
2429 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2430 first_line_is_new: true,
2431 indent_size: before_edit.language_indent_size_at(range.start, cx),
2432 original_indent_column: None,
2433 })
2434 .collect();
2435 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2436 before_edit,
2437 entries,
2438 is_block_mode: false,
2439 ignore_empty_lines: true,
2440 }));
2441 self.request_autoindent(cx);
2442 }
2443
2444 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2445 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2446 pub fn insert_empty_line(
2447 &mut self,
2448 position: impl ToPoint,
2449 space_above: bool,
2450 space_below: bool,
2451 cx: &mut Context<Self>,
2452 ) -> Point {
2453 let mut position = position.to_point(self);
2454
2455 self.start_transaction();
2456
2457 self.edit(
2458 [(position..position, "\n")],
2459 Some(AutoindentMode::EachLine),
2460 cx,
2461 );
2462
2463 if position.column > 0 {
2464 position += Point::new(1, 0);
2465 }
2466
2467 if !self.is_line_blank(position.row) {
2468 self.edit(
2469 [(position..position, "\n")],
2470 Some(AutoindentMode::EachLine),
2471 cx,
2472 );
2473 }
2474
2475 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2476 self.edit(
2477 [(position..position, "\n")],
2478 Some(AutoindentMode::EachLine),
2479 cx,
2480 );
2481 position.row += 1;
2482 }
2483
2484 if space_below
2485 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2486 {
2487 self.edit(
2488 [(position..position, "\n")],
2489 Some(AutoindentMode::EachLine),
2490 cx,
2491 );
2492 }
2493
2494 self.end_transaction(cx);
2495
2496 position
2497 }
2498
2499 /// Applies the given remote operations to the buffer.
2500 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2501 self.pending_autoindent.take();
2502 let was_dirty = self.is_dirty();
2503 let old_version = self.version.clone();
2504 let mut deferred_ops = Vec::new();
2505 let buffer_ops = ops
2506 .into_iter()
2507 .filter_map(|op| match op {
2508 Operation::Buffer(op) => Some(op),
2509 _ => {
2510 if self.can_apply_op(&op) {
2511 self.apply_op(op, cx);
2512 } else {
2513 deferred_ops.push(op);
2514 }
2515 None
2516 }
2517 })
2518 .collect::<Vec<_>>();
2519 for operation in buffer_ops.iter() {
2520 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2521 }
2522 self.text.apply_ops(buffer_ops);
2523 self.deferred_ops.insert(deferred_ops);
2524 self.flush_deferred_ops(cx);
2525 self.did_edit(&old_version, was_dirty, cx);
2526 // Notify independently of whether the buffer was edited as the operations could include a
2527 // selection update.
2528 cx.notify();
2529 }
2530
2531 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2532 let mut deferred_ops = Vec::new();
2533 for op in self.deferred_ops.drain().iter().cloned() {
2534 if self.can_apply_op(&op) {
2535 self.apply_op(op, cx);
2536 } else {
2537 deferred_ops.push(op);
2538 }
2539 }
2540 self.deferred_ops.insert(deferred_ops);
2541 }
2542
2543 pub fn has_deferred_ops(&self) -> bool {
2544 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2545 }
2546
2547 fn can_apply_op(&self, operation: &Operation) -> bool {
2548 match operation {
2549 Operation::Buffer(_) => {
2550 unreachable!("buffer operations should never be applied at this layer")
2551 }
2552 Operation::UpdateDiagnostics {
2553 diagnostics: diagnostic_set,
2554 ..
2555 } => diagnostic_set.iter().all(|diagnostic| {
2556 self.text.can_resolve(&diagnostic.range.start)
2557 && self.text.can_resolve(&diagnostic.range.end)
2558 }),
2559 Operation::UpdateSelections { selections, .. } => selections
2560 .iter()
2561 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2562 Operation::UpdateCompletionTriggers { .. } => true,
2563 }
2564 }
2565
2566 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2567 match operation {
2568 Operation::Buffer(_) => {
2569 unreachable!("buffer operations should never be applied at this layer")
2570 }
2571 Operation::UpdateDiagnostics {
2572 server_id,
2573 diagnostics: diagnostic_set,
2574 lamport_timestamp,
2575 } => {
2576 let snapshot = self.snapshot();
2577 self.apply_diagnostic_update(
2578 server_id,
2579 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2580 lamport_timestamp,
2581 cx,
2582 );
2583 }
2584 Operation::UpdateSelections {
2585 selections,
2586 lamport_timestamp,
2587 line_mode,
2588 cursor_shape,
2589 } => {
2590 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2591 && set.lamport_timestamp > lamport_timestamp
2592 {
2593 return;
2594 }
2595
2596 self.remote_selections.insert(
2597 lamport_timestamp.replica_id,
2598 SelectionSet {
2599 selections,
2600 lamport_timestamp,
2601 line_mode,
2602 cursor_shape,
2603 },
2604 );
2605 self.text.lamport_clock.observe(lamport_timestamp);
2606 self.non_text_state_update_count += 1;
2607 }
2608 Operation::UpdateCompletionTriggers {
2609 triggers,
2610 lamport_timestamp,
2611 server_id,
2612 } => {
2613 if triggers.is_empty() {
2614 self.completion_triggers_per_language_server
2615 .remove(&server_id);
2616 self.completion_triggers = self
2617 .completion_triggers_per_language_server
2618 .values()
2619 .flat_map(|triggers| triggers.into_iter().cloned())
2620 .collect();
2621 } else {
2622 self.completion_triggers_per_language_server
2623 .insert(server_id, triggers.iter().cloned().collect());
2624 self.completion_triggers.extend(triggers);
2625 }
2626 self.text.lamport_clock.observe(lamport_timestamp);
2627 }
2628 }
2629 }
2630
2631 fn apply_diagnostic_update(
2632 &mut self,
2633 server_id: LanguageServerId,
2634 diagnostics: DiagnosticSet,
2635 lamport_timestamp: clock::Lamport,
2636 cx: &mut Context<Self>,
2637 ) {
2638 if lamport_timestamp > self.diagnostics_timestamp {
2639 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2640 if diagnostics.is_empty() {
2641 if let Ok(ix) = ix {
2642 self.diagnostics.remove(ix);
2643 }
2644 } else {
2645 match ix {
2646 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2647 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2648 };
2649 }
2650 self.diagnostics_timestamp = lamport_timestamp;
2651 self.non_text_state_update_count += 1;
2652 self.text.lamport_clock.observe(lamport_timestamp);
2653 cx.notify();
2654 cx.emit(BufferEvent::DiagnosticsUpdated);
2655 }
2656 }
2657
2658 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2659 self.was_changed();
2660 cx.emit(BufferEvent::Operation {
2661 operation,
2662 is_local,
2663 });
2664 }
2665
2666 /// Removes the selections for a given peer.
2667 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2668 self.remote_selections.remove(&replica_id);
2669 cx.notify();
2670 }
2671
2672 /// Undoes the most recent transaction.
2673 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2674 let was_dirty = self.is_dirty();
2675 let old_version = self.version.clone();
2676
2677 if let Some((transaction_id, operation)) = self.text.undo() {
2678 self.send_operation(Operation::Buffer(operation), true, cx);
2679 self.did_edit(&old_version, was_dirty, cx);
2680 Some(transaction_id)
2681 } else {
2682 None
2683 }
2684 }
2685
2686 /// Manually undoes a specific transaction in the buffer's undo history.
2687 pub fn undo_transaction(
2688 &mut self,
2689 transaction_id: TransactionId,
2690 cx: &mut Context<Self>,
2691 ) -> bool {
2692 let was_dirty = self.is_dirty();
2693 let old_version = self.version.clone();
2694 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2695 self.send_operation(Operation::Buffer(operation), true, cx);
2696 self.did_edit(&old_version, was_dirty, cx);
2697 true
2698 } else {
2699 false
2700 }
2701 }
2702
2703 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2704 pub fn undo_to_transaction(
2705 &mut self,
2706 transaction_id: TransactionId,
2707 cx: &mut Context<Self>,
2708 ) -> bool {
2709 let was_dirty = self.is_dirty();
2710 let old_version = self.version.clone();
2711
2712 let operations = self.text.undo_to_transaction(transaction_id);
2713 let undone = !operations.is_empty();
2714 for operation in operations {
2715 self.send_operation(Operation::Buffer(operation), true, cx);
2716 }
2717 if undone {
2718 self.did_edit(&old_version, was_dirty, cx)
2719 }
2720 undone
2721 }
2722
2723 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2724 let was_dirty = self.is_dirty();
2725 let operation = self.text.undo_operations(counts);
2726 let old_version = self.version.clone();
2727 self.send_operation(Operation::Buffer(operation), true, cx);
2728 self.did_edit(&old_version, was_dirty, cx);
2729 }
2730
2731 /// Redoes the most recently undone transaction.
2732 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2733 let was_dirty = self.is_dirty();
2734 let old_version = self.version.clone();
2735
2736 if let Some((transaction_id, operation)) = self.text.redo() {
2737 self.send_operation(Operation::Buffer(operation), true, cx);
2738 self.did_edit(&old_version, was_dirty, cx);
2739 Some(transaction_id)
2740 } else {
2741 None
2742 }
2743 }
2744
2745 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2746 pub fn redo_to_transaction(
2747 &mut self,
2748 transaction_id: TransactionId,
2749 cx: &mut Context<Self>,
2750 ) -> bool {
2751 let was_dirty = self.is_dirty();
2752 let old_version = self.version.clone();
2753
2754 let operations = self.text.redo_to_transaction(transaction_id);
2755 let redone = !operations.is_empty();
2756 for operation in operations {
2757 self.send_operation(Operation::Buffer(operation), true, cx);
2758 }
2759 if redone {
2760 self.did_edit(&old_version, was_dirty, cx)
2761 }
2762 redone
2763 }
2764
2765 /// Override current completion triggers with the user-provided completion triggers.
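///
/// A hedged sketch (`server_id` is a hypothetical `LanguageServerId`):
///
/// ```ignore
/// buffer.set_completion_triggers(
///     server_id,
///     BTreeSet::from([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// assert!(buffer.completion_triggers().contains("."));
/// ```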
2766 pub fn set_completion_triggers(
2767 &mut self,
2768 server_id: LanguageServerId,
2769 triggers: BTreeSet<String>,
2770 cx: &mut Context<Self>,
2771 ) {
2772 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2773 if triggers.is_empty() {
2774 self.completion_triggers_per_language_server
2775 .remove(&server_id);
2776 self.completion_triggers = self
2777 .completion_triggers_per_language_server
2778 .values()
2779 .flat_map(|triggers| triggers.into_iter().cloned())
2780 .collect();
2781 } else {
2782 self.completion_triggers_per_language_server
2783 .insert(server_id, triggers.clone());
2784 self.completion_triggers.extend(triggers.iter().cloned());
2785 }
2786 self.send_operation(
2787 Operation::UpdateCompletionTriggers {
2788 triggers: triggers.into_iter().collect(),
2789 lamport_timestamp: self.completion_triggers_timestamp,
2790 server_id,
2791 },
2792 true,
2793 cx,
2794 );
2795 cx.notify();
2796 }
2797
2798 /// Returns a list of strings which trigger a completion menu for this language.
2799 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2800 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2801 &self.completion_triggers
2802 }
2803
2804 /// Call this directly after performing edits to prevent the preview tab
2805 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2806 /// to return false until there are additional edits.
2807 pub fn refresh_preview(&mut self) {
2808 self.preview_version = self.version.clone();
2809 }
2810
2811 /// Whether we should preserve the preview status of a tab containing this buffer.
2812 pub fn preserve_preview(&self) -> bool {
2813 !self.has_edits_since(&self.preview_version)
2814 }
2815}
2816
2817#[doc(hidden)]
2818#[cfg(any(test, feature = "test-support"))]
2819impl Buffer {
2820 pub fn edit_via_marked_text(
2821 &mut self,
2822 marked_string: &str,
2823 autoindent_mode: Option<AutoindentMode>,
2824 cx: &mut Context<Self>,
2825 ) {
2826 let edits = self.edits_for_marked_text(marked_string);
2827 self.edit(edits, autoindent_mode, cx);
2828 }
2829
2830 pub fn set_group_interval(&mut self, group_interval: Duration) {
2831 self.text.set_group_interval(group_interval);
2832 }
2833
2834 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2835 where
2836 T: rand::Rng,
2837 {
2838 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2839 let mut last_end = None;
2840 for _ in 0..old_range_count {
2841 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2842 break;
2843 }
2844
2845 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2846 let mut range = self.random_byte_range(new_start, rng);
2847 if rng.gen_bool(0.2) {
2848 mem::swap(&mut range.start, &mut range.end);
2849 }
2850 last_end = Some(range.end);
2851
2852 let new_text_len = rng.gen_range(0..10);
2853 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2854 new_text = new_text.to_uppercase();
2855
2856 edits.push((range, new_text));
2857 }
2858 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2859 self.edit(edits, None, cx);
2860 }
2861
2862 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2863 let was_dirty = self.is_dirty();
2864 let old_version = self.version.clone();
2865
2866 let ops = self.text.randomly_undo_redo(rng);
2867 if !ops.is_empty() {
2868 for op in ops {
2869 self.send_operation(Operation::Buffer(op), true, cx);
2870 self.did_edit(&old_version, was_dirty, cx);
2871 }
2872 }
2873 }
2874}
2875
2876impl EventEmitter<BufferEvent> for Buffer {}
2877
2878impl Deref for Buffer {
2879 type Target = TextBuffer;
2880
2881 fn deref(&self) -> &Self::Target {
2882 &self.text
2883 }
2884}
2885
2886impl BufferSnapshot {
2887 /// Returns the [`IndentSize`] of the given line, as determined by its
2888 /// leading whitespace.
2889 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2890 indent_size_for_line(self, row)
2891 }
2892
2893 /// Returns [`IndentSize`] for a given position that respects user settings
2894 /// and language preferences.
2895 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2896 let settings = language_settings(
2897 self.language_at(position).map(|l| l.name()),
2898 self.file(),
2899 cx,
2900 );
2901 if settings.hard_tabs {
2902 IndentSize::tab()
2903 } else {
2904 IndentSize::spaces(settings.tab_size.get())
2905 }
2906 }
2907
2908 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2909 /// is passed in as `single_indent_size`.
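///
/// A hedged sketch, assuming a 4-space indent unit:
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(1..=3, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```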
2910 pub fn suggested_indents(
2911 &self,
2912 rows: impl Iterator<Item = u32>,
2913 single_indent_size: IndentSize,
2914 ) -> BTreeMap<u32, IndentSize> {
2915 let mut result = BTreeMap::new();
2916
2917 for row_range in contiguous_ranges(rows, 10) {
2918 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2919 Some(suggestions) => suggestions,
2920 _ => break,
2921 };
2922
2923 for (row, suggestion) in row_range.zip(suggestions) {
2924 let indent_size = if let Some(suggestion) = suggestion {
2925 result
2926 .get(&suggestion.basis_row)
2927 .copied()
2928 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2929 .with_delta(suggestion.delta, single_indent_size)
2930 } else {
2931 self.indent_size_for_line(row)
2932 };
2933
2934 result.insert(row, indent_size);
2935 }
2936 }
2937
2938 result
2939 }
2940
2941 fn suggest_autoindents(
2942 &self,
2943 row_range: Range<u32>,
2944 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2945 let config = &self.language.as_ref()?.config;
2946 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2947
2948 #[derive(Debug, Clone)]
2949 struct StartPosition {
2950 start: Point,
2951 suffix: SharedString,
2952 }
2953
2954 // Find the suggested indentation ranges based on the syntax tree.
2955 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2956 let end = Point::new(row_range.end, 0);
2957 let range = (start..end).to_offset(&self.text);
2958 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2959 Some(&grammar.indents_config.as_ref()?.query)
2960 });
2961 let indent_configs = matches
2962 .grammars()
2963 .iter()
2964 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2965 .collect::<Vec<_>>();
2966
2967 let mut indent_ranges = Vec::<Range<Point>>::new();
2968 let mut start_positions = Vec::<StartPosition>::new();
2969 let mut outdent_positions = Vec::<Point>::new();
2970 while let Some(mat) = matches.peek() {
2971 let mut start: Option<Point> = None;
2972 let mut end: Option<Point> = None;
2973
2974 let config = indent_configs[mat.grammar_index];
2975 for capture in mat.captures {
2976 if capture.index == config.indent_capture_ix {
2977 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2978 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2979 } else if Some(capture.index) == config.start_capture_ix {
2980 start = Some(Point::from_ts_point(capture.node.end_position()));
2981 } else if Some(capture.index) == config.end_capture_ix {
2982 end = Some(Point::from_ts_point(capture.node.start_position()));
2983 } else if Some(capture.index) == config.outdent_capture_ix {
2984 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2985 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2986 start_positions.push(StartPosition {
2987 start: Point::from_ts_point(capture.node.start_position()),
2988 suffix: suffix.clone(),
2989 });
2990 }
2991 }
2992
2993 matches.advance();
2994 if let Some((start, end)) = start.zip(end) {
2995 if start.row == end.row {
2996 continue;
2997 }
2998 let range = start..end;
2999 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3000 Err(ix) => indent_ranges.insert(ix, range),
3001 Ok(ix) => {
3002 let prev_range = &mut indent_ranges[ix];
3003 prev_range.end = prev_range.end.max(range.end);
3004 }
3005 }
3006 }
3007 }
3008
3009 let mut error_ranges = Vec::<Range<Point>>::new();
3010 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3011 grammar.error_query.as_ref()
3012 });
3013 while let Some(mat) = matches.peek() {
3014 let node = mat.captures[0].node;
3015 let start = Point::from_ts_point(node.start_position());
3016 let end = Point::from_ts_point(node.end_position());
3017 let range = start..end;
3018 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3019 Ok(ix) | Err(ix) => ix,
3020 };
3021 let mut end_ix = ix;
3022 while let Some(existing_range) = error_ranges.get(end_ix) {
3023 if existing_range.end < end {
3024 end_ix += 1;
3025 } else {
3026 break;
3027 }
3028 }
3029 error_ranges.splice(ix..end_ix, [range]);
3030 matches.advance();
3031 }
3032
3033 outdent_positions.sort();
3034 for outdent_position in outdent_positions {
3035 // find the innermost indent range containing this outdent_position
3036 // set its end to the outdent position
3037 if let Some(range_to_truncate) = indent_ranges
3038 .iter_mut()
3039 .filter(|indent_range| indent_range.contains(&outdent_position))
3040 .next_back()
3041 {
3042 range_to_truncate.end = outdent_position;
3043 }
3044 }
3045
3046 start_positions.sort_by_key(|b| b.start);
3047
3048 // Find the suggested indentation increases and decreases based on regexes.
3049 let mut regex_outdent_map = HashMap::default();
3050 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3051 let mut start_positions_iter = start_positions.iter().peekable();
3052
3053 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3054 self.for_each_line(
3055 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3056 ..Point::new(row_range.end, 0),
3057 |row, line| {
3058 if config
3059 .decrease_indent_pattern
3060 .as_ref()
3061 .is_some_and(|regex| regex.is_match(line))
3062 {
3063 indent_change_rows.push((row, Ordering::Less));
3064 }
3065 if config
3066 .increase_indent_pattern
3067 .as_ref()
3068 .is_some_and(|regex| regex.is_match(line))
3069 {
3070 indent_change_rows.push((row + 1, Ordering::Greater));
3071 }
3072 while let Some(pos) = start_positions_iter.peek() {
3073 if pos.start.row < row {
3074 let pos = start_positions_iter.next().unwrap();
3075 last_seen_suffix
3076 .entry(pos.suffix.to_string())
3077 .or_default()
3078 .push(pos.start);
3079 } else {
3080 break;
3081 }
3082 }
3083 for rule in &config.decrease_indent_patterns {
3084 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3085 let row_start_column = self.indent_size_for_line(row).len;
3086 let basis_row = rule
3087 .valid_after
3088 .iter()
3089 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3090 .flatten()
3091 .filter(|start_point| start_point.column <= row_start_column)
3092 .max_by_key(|start_point| start_point.row);
3093 if let Some(outdent_to_row) = basis_row {
3094 regex_outdent_map.insert(row, outdent_to_row.row);
3095 }
3096 break;
3097 }
3098 }
3099 },
3100 );
3101
3102 let mut indent_changes = indent_change_rows.into_iter().peekable();
3103 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3104 prev_non_blank_row.unwrap_or(0)
3105 } else {
3106 row_range.start.saturating_sub(1)
3107 };
3108
3109 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3110 Some(row_range.map(move |row| {
3111 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3112
3113 let mut indent_from_prev_row = false;
3114 let mut outdent_from_prev_row = false;
3115 let mut outdent_to_row = u32::MAX;
3116 let mut from_regex = false;
3117
3118 while let Some((indent_row, delta)) = indent_changes.peek() {
3119 match indent_row.cmp(&row) {
3120 Ordering::Equal => match delta {
3121 Ordering::Less => {
3122 from_regex = true;
3123 outdent_from_prev_row = true
3124 }
3125 Ordering::Greater => {
3126 indent_from_prev_row = true;
3127 from_regex = true
3128 }
3129 _ => {}
3130 },
3131
3132 Ordering::Greater => break,
3133 Ordering::Less => {}
3134 }
3135
3136 indent_changes.next();
3137 }
3138
3139 for range in &indent_ranges {
3140 if range.start.row >= row {
3141 break;
3142 }
3143 if range.start.row == prev_row && range.end > row_start {
3144 indent_from_prev_row = true;
3145 }
3146 if range.end > prev_row_start && range.end <= row_start {
3147 outdent_to_row = outdent_to_row.min(range.start.row);
3148 }
3149 }
3150
3151 if let Some(basis_row) = regex_outdent_map.get(&row) {
3152 indent_from_prev_row = false;
3153 outdent_to_row = *basis_row;
3154 from_regex = true;
3155 }
3156
3157 let within_error = error_ranges
3158 .iter()
3159 .any(|e| e.start.row < row && e.end > row_start);
3160
3161 let suggestion = if outdent_to_row == prev_row
3162 || (outdent_from_prev_row && indent_from_prev_row)
3163 {
3164 Some(IndentSuggestion {
3165 basis_row: prev_row,
3166 delta: Ordering::Equal,
3167 within_error: within_error && !from_regex,
3168 })
3169 } else if indent_from_prev_row {
3170 Some(IndentSuggestion {
3171 basis_row: prev_row,
3172 delta: Ordering::Greater,
3173 within_error: within_error && !from_regex,
3174 })
3175 } else if outdent_to_row < prev_row {
3176 Some(IndentSuggestion {
3177 basis_row: outdent_to_row,
3178 delta: Ordering::Equal,
3179 within_error: within_error && !from_regex,
3180 })
3181 } else if outdent_from_prev_row {
3182 Some(IndentSuggestion {
3183 basis_row: prev_row,
3184 delta: Ordering::Less,
3185 within_error: within_error && !from_regex,
3186 })
3187 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3188 {
3189 Some(IndentSuggestion {
3190 basis_row: prev_row,
3191 delta: Ordering::Equal,
3192 within_error: within_error && !from_regex,
3193 })
3194 } else {
3195 None
3196 };
3197
3198 prev_row = row;
3199 prev_row_start = row_start;
3200 suggestion
3201 }))
3202 }
3203
3204 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3205 while row > 0 {
3206 row -= 1;
3207 if !self.is_line_blank(row) {
3208 return Some(row);
3209 }
3210 }
3211 None
3212 }
3213
3214 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3215 let captures = self.syntax.captures(range, &self.text, |grammar| {
3216 grammar.highlights_query.as_ref()
3217 });
3218 let highlight_maps = captures
3219 .grammars()
3220 .iter()
3221 .map(|grammar| grammar.highlight_map())
3222 .collect();
3223 (captures, highlight_maps)
3224 }
3225
3226 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3227 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3228 /// returned in chunks where each chunk has a single syntax highlighting style and
3229 /// diagnostic status.
3230 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3231 let range = range.start.to_offset(self)..range.end.to_offset(self);
3232
3233 let mut syntax = None;
3234 if language_aware {
3235 syntax = Some(self.get_highlights(range.clone()));
3236 }
3237 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3238 let diagnostics = language_aware;
3239 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3240 }
3241
3242 pub fn highlighted_text_for_range<T: ToOffset>(
3243 &self,
3244 range: Range<T>,
3245 override_style: Option<HighlightStyle>,
3246 syntax_theme: &SyntaxTheme,
3247 ) -> HighlightedText {
3248 HighlightedText::from_buffer_range(
3249 range,
3250 &self.text,
3251 &self.syntax,
3252 override_style,
3253 syntax_theme,
3254 )
3255 }
3256
3257 /// Invokes the given callback for each line of text in the given range of the buffer.
3258 /// Uses a callback to avoid allocating a string for each line.
3259 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3260 let mut line = String::new();
3261 let mut row = range.start.row;
3262 for chunk in self
3263 .as_rope()
3264 .chunks_in_range(range.to_offset(self))
3265 .chain(["\n"])
3266 {
3267 for (newline_ix, text) in chunk.split('\n').enumerate() {
3268 if newline_ix > 0 {
3269 callback(row, &line);
3270 row += 1;
3271 line.clear();
3272 }
3273 line.push_str(text);
3274 }
3275 }
3276 }
3277
3278 /// Iterates over every [`SyntaxLayer`] in the buffer.
3279 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3280 self.syntax
3281 .layers_for_range(0..self.len(), &self.text, true)
3282 }
3283
3284 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3285 let offset = position.to_offset(self);
3286 self.syntax
3287 .layers_for_range(offset..offset, &self.text, false)
3288 .filter(|l| l.node().end_byte() > offset)
3289 .last()
3290 }
3291
3292 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3293 &self,
3294 range: Range<D>,
3295 ) -> Option<SyntaxLayer<'_>> {
3296 let range = range.to_offset(self);
3297 self.syntax
3298 .layers_for_range(range, &self.text, false)
3299 .max_by(|a, b| {
3300 if a.depth != b.depth {
3301 a.depth.cmp(&b.depth)
3302 } else if a.offset.0 != b.offset.0 {
3303 a.offset.0.cmp(&b.offset.0)
3304 } else {
3305 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3306 }
3307 })
3308 }
3309
3310 /// Returns the main [`Language`].
3311 pub fn language(&self) -> Option<&Arc<Language>> {
3312 self.language.as_ref()
3313 }
3314
3315 /// Returns the [`Language`] at the given location.
3316 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3317 self.syntax_layer_at(position)
3318 .map(|info| info.language)
3319 .or(self.language.as_ref())
3320 }
3321
3322 /// Returns the settings for the language at the given location.
3323 pub fn settings_at<'a, D: ToOffset>(
3324 &'a self,
3325 position: D,
3326 cx: &'a App,
3327 ) -> Cow<'a, LanguageSettings> {
3328 language_settings(
3329 self.language_at(position).map(|l| l.name()),
3330 self.file.as_ref(),
3331 cx,
3332 )
3333 }
3334
3335 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3336 CharClassifier::new(self.language_scope_at(point))
3337 }
3338
3339 /// Returns the [`LanguageScope`] at the given location.
3340 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3341 let offset = position.to_offset(self);
3342 let mut scope = None;
3343 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3344
3345 // Use the layer that has the smallest node intersecting the given point.
3346 for layer in self
3347 .syntax
3348 .layers_for_range(offset..offset, &self.text, false)
3349 {
3350 let mut cursor = layer.node().walk();
3351
3352 let mut range = None;
3353 loop {
3354 let child_range = cursor.node().byte_range();
3355 if !child_range.contains(&offset) {
3356 break;
3357 }
3358
3359 range = Some(child_range);
3360 if cursor.goto_first_child_for_byte(offset).is_none() {
3361 break;
3362 }
3363 }
3364
3365 if let Some(range) = range
3366 && smallest_range_and_depth.as_ref().is_none_or(
3367 |(smallest_range, smallest_range_depth)| {
3368 if layer.depth > *smallest_range_depth {
3369 true
3370 } else if layer.depth == *smallest_range_depth {
3371 range.len() < smallest_range.len()
3372 } else {
3373 false
3374 }
3375 },
3376 )
3377 {
3378 smallest_range_and_depth = Some((range, layer.depth));
3379 scope = Some(LanguageScope {
3380 language: layer.language.clone(),
3381 override_id: layer.override_id(offset, &self.text),
3382 });
3383 }
3384 }
3385
3386 scope.or_else(|| {
3387 self.language.clone().map(|language| LanguageScope {
3388 language,
3389 override_id: None,
3390 })
3391 })
3392 }
3393
3394 /// Returns a tuple of the range and character kind of the word
3395 /// surrounding the given position.
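///
/// A hedged sketch with hypothetical content:
///
/// ```ignore
/// // With the text "let foo = 1;", an offset inside "foo" expands to the word.
/// let (range, _kind) = snapshot.surrounding_word(5, false);
/// assert_eq!(&snapshot.text()[range], "foo");
/// ```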
3396 pub fn surrounding_word<T: ToOffset>(
3397 &self,
3398 start: T,
3399 for_completion: bool,
3400 ) -> (Range<usize>, Option<CharKind>) {
3401 let mut start = start.to_offset(self);
3402 let mut end = start;
3403 let mut next_chars = self.chars_at(start).take(128).peekable();
3404 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3405
3406 let classifier = self
3407 .char_classifier_at(start)
3408 .for_completion(for_completion);
3409 let word_kind = cmp::max(
3410 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3411 next_chars.peek().copied().map(|c| classifier.kind(c)),
3412 );
3413
3414 for ch in prev_chars {
3415 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3416 start -= ch.len_utf8();
3417 } else {
3418 break;
3419 }
3420 }
3421
3422 for ch in next_chars {
3423 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3424 end += ch.len_utf8();
3425 } else {
3426 break;
3427 }
3428 }
3429
3430 (start..end, word_kind)
3431 }
3432
3433 /// Returns the closest syntax node enclosing the given range.
3434 pub fn syntax_ancestor<'a, T: ToOffset>(
3435 &'a self,
3436 range: Range<T>,
3437 ) -> Option<tree_sitter::Node<'a>> {
3438 let range = range.start.to_offset(self)..range.end.to_offset(self);
3439 let mut result: Option<tree_sitter::Node<'a>> = None;
3440 'outer: for layer in self
3441 .syntax
3442 .layers_for_range(range.clone(), &self.text, true)
3443 {
3444 let mut cursor = layer.node().walk();
3445
3446 // Descend to the first leaf that touches the start of the range.
3447 //
3448 // If the range is non-empty and the current node ends exactly at the start,
3449 // move to the next sibling to find a node that extends beyond the start.
3450 //
3451 // If the range is empty and the current node starts after the range position,
3452 // move to the previous sibling to find the node that contains the position.
3453 while cursor.goto_first_child_for_byte(range.start).is_some() {
3454 if !range.is_empty() && cursor.node().end_byte() == range.start {
3455 cursor.goto_next_sibling();
3456 }
3457 if range.is_empty() && cursor.node().start_byte() > range.start {
3458 cursor.goto_previous_sibling();
3459 }
3460 }
3461
3462 // Ascend to the smallest ancestor that strictly contains the range.
3463 loop {
3464 let node_range = cursor.node().byte_range();
3465 if node_range.start <= range.start
3466 && node_range.end >= range.end
3467 && node_range.len() > range.len()
3468 {
3469 break;
3470 }
3471 if !cursor.goto_parent() {
3472 continue 'outer;
3473 }
3474 }
3475
3476 let left_node = cursor.node();
3477 let mut layer_result = left_node;
3478
3479 // For an empty range, try to find another node immediately to the right of the range.
3480 if left_node.end_byte() == range.start {
3481 let mut right_node = None;
3482 while !cursor.goto_next_sibling() {
3483 if !cursor.goto_parent() {
3484 break;
3485 }
3486 }
3487
3488 while cursor.node().start_byte() == range.start {
3489 right_node = Some(cursor.node());
3490 if !cursor.goto_first_child() {
3491 break;
3492 }
3493 }
3494
3495 // If there is a candidate node on both sides of the (empty) range, then
3496 // decide between the two by favoring a named node over an anonymous token.
3497 // If both nodes are the same in that regard, favor the right one.
3498 if let Some(right_node) = right_node
3499 && (right_node.is_named() || !left_node.is_named())
3500 {
3501 layer_result = right_node;
3502 }
3503 }
3504
3505 if let Some(previous_result) = &result
3506 && previous_result.byte_range().len() < layer_result.byte_range().len()
3507 {
3508 continue;
3509 }
3510 result = Some(layer_result);
3511 }
3512
3513 result
3514 }
3515
    /// Returns the root syntax node within the row of the given position.
3517 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3518 let start_offset = position.to_offset(self);
3519
3520 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3521
3522 let layer = self
3523 .syntax
3524 .layers_for_range(start_offset..start_offset, &self.text, true)
3525 .next()?;
3526
3527 let mut cursor = layer.node().walk();
3528
3529 // Descend to the first leaf that touches the start of the range.
3530 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3531 if cursor.node().end_byte() == start_offset {
3532 cursor.goto_next_sibling();
3533 }
3534 }
3535
3536 // Ascend to the root node within the same row.
3537 while cursor.goto_parent() {
3538 if cursor.node().start_position().row != row {
3539 break;
3540 }
3541 }
3542
3543 Some(cursor.node())
3544 }
3545
3546 /// Returns the outline for the buffer.
3547 ///
3548 /// This method allows passing an optional [`SyntaxTheme`] to
3549 /// syntax-highlight the returned symbols.
3550 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3551 self.outline_items_containing(0..self.len(), true, theme)
3552 .map(Outline::new)
3553 }
3554
3555 /// Returns all the symbols that contain the given position.
3556 ///
3557 /// This method allows passing an optional [`SyntaxTheme`] to
3558 /// syntax-highlight the returned symbols.
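    ///
    /// A minimal sketch, assuming `snapshot` and a byte `offset` of interest
    /// (both bindings are illustrative only):
    ///
    /// ```ignore
    /// // Print the chain of symbols enclosing `offset`, indented by depth.
    /// if let Some(symbols) = snapshot.symbols_containing(offset, None) {
    ///     for item in symbols {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```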
3559 pub fn symbols_containing<T: ToOffset>(
3560 &self,
3561 position: T,
3562 theme: Option<&SyntaxTheme>,
3563 ) -> Option<Vec<OutlineItem<Anchor>>> {
3564 let position = position.to_offset(self);
3565 let mut items = self.outline_items_containing(
3566 position.saturating_sub(1)..self.len().min(position + 1),
3567 false,
3568 theme,
3569 )?;
3570 let mut prev_depth = None;
3571 items.retain(|item| {
3572 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3573 prev_depth = Some(item.depth);
3574 result
3575 });
3576 Some(items)
3577 }
3578
3579 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3580 let range = range.to_offset(self);
3581 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3582 grammar.outline_config.as_ref().map(|c| &c.query)
3583 });
3584 let configs = matches
3585 .grammars()
3586 .iter()
3587 .map(|g| g.outline_config.as_ref().unwrap())
3588 .collect::<Vec<_>>();
3589
3590 while let Some(mat) = matches.peek() {
3591 let config = &configs[mat.grammar_index];
3592 let containing_item_node = maybe!({
3593 let item_node = mat.captures.iter().find_map(|cap| {
3594 if cap.index == config.item_capture_ix {
3595 Some(cap.node)
3596 } else {
3597 None
3598 }
3599 })?;
3600
3601 let item_byte_range = item_node.byte_range();
3602 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3603 None
3604 } else {
3605 Some(item_node)
3606 }
3607 });
3608
3609 if let Some(item_node) = containing_item_node {
3610 return Some(
3611 Point::from_ts_point(item_node.start_position())
3612 ..Point::from_ts_point(item_node.end_position()),
3613 );
3614 }
3615
3616 matches.advance();
3617 }
3618 None
3619 }
3620
3621 pub fn outline_items_containing<T: ToOffset>(
3622 &self,
3623 range: Range<T>,
3624 include_extra_context: bool,
3625 theme: Option<&SyntaxTheme>,
3626 ) -> Option<Vec<OutlineItem<Anchor>>> {
3627 let range = range.to_offset(self);
3628 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3629 grammar.outline_config.as_ref().map(|c| &c.query)
3630 });
3631 let configs = matches
3632 .grammars()
3633 .iter()
3634 .map(|g| g.outline_config.as_ref().unwrap())
3635 .collect::<Vec<_>>();
3636
3637 let mut items = Vec::new();
3638 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3639 while let Some(mat) = matches.peek() {
3640 let config = &configs[mat.grammar_index];
3641 if let Some(item) =
3642 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3643 {
3644 items.push(item);
3645 } else if let Some(capture) = mat
3646 .captures
3647 .iter()
3648 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3649 {
3650 let capture_range = capture.node.start_position()..capture.node.end_position();
3651 let mut capture_row_range =
3652 capture_range.start.row as u32..capture_range.end.row as u32;
3653 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3654 {
3655 capture_row_range.end -= 1;
3656 }
3657 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3658 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3659 last_row_range.end = capture_row_range.end;
3660 } else {
3661 annotation_row_ranges.push(capture_row_range);
3662 }
3663 } else {
3664 annotation_row_ranges.push(capture_row_range);
3665 }
3666 }
3667 matches.advance();
3668 }
3669
3670 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3671
3672 // Assign depths based on containment relationships and convert to anchors.
3673 let mut item_ends_stack = Vec::<Point>::new();
3674 let mut anchor_items = Vec::new();
3675 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3676 for item in items {
3677 while let Some(last_end) = item_ends_stack.last().copied() {
3678 if last_end < item.range.end {
3679 item_ends_stack.pop();
3680 } else {
3681 break;
3682 }
3683 }
3684
3685 let mut annotation_row_range = None;
3686 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3687 let row_preceding_item = item.range.start.row.saturating_sub(1);
3688 if next_annotation_row_range.end < row_preceding_item {
3689 annotation_row_ranges.next();
3690 } else {
3691 if next_annotation_row_range.end == row_preceding_item {
3692 annotation_row_range = Some(next_annotation_row_range.clone());
3693 annotation_row_ranges.next();
3694 }
3695 break;
3696 }
3697 }
3698
3699 anchor_items.push(OutlineItem {
3700 depth: item_ends_stack.len(),
3701 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3702 text: item.text,
3703 highlight_ranges: item.highlight_ranges,
3704 name_ranges: item.name_ranges,
3705 body_range: item.body_range.map(|body_range| {
3706 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3707 }),
3708 annotation_range: annotation_row_range.map(|annotation_range| {
3709 self.anchor_after(Point::new(annotation_range.start, 0))
3710 ..self.anchor_before(Point::new(
3711 annotation_range.end,
3712 self.line_len(annotation_range.end),
3713 ))
3714 }),
3715 });
3716 item_ends_stack.push(item.range.end);
3717 }
3718
3719 Some(anchor_items)
3720 }
3721
3722 fn next_outline_item(
3723 &self,
3724 config: &OutlineConfig,
3725 mat: &SyntaxMapMatch,
3726 range: &Range<usize>,
3727 include_extra_context: bool,
3728 theme: Option<&SyntaxTheme>,
3729 ) -> Option<OutlineItem<Point>> {
3730 let item_node = mat.captures.iter().find_map(|cap| {
3731 if cap.index == config.item_capture_ix {
3732 Some(cap.node)
3733 } else {
3734 None
3735 }
3736 })?;
3737
3738 let item_byte_range = item_node.byte_range();
3739 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3740 return None;
3741 }
3742 let item_point_range = Point::from_ts_point(item_node.start_position())
3743 ..Point::from_ts_point(item_node.end_position());
3744
3745 let mut open_point = None;
3746 let mut close_point = None;
3747 let mut buffer_ranges = Vec::new();
3748 for capture in mat.captures {
3749 let node_is_name;
3750 if capture.index == config.name_capture_ix {
3751 node_is_name = true;
3752 } else if Some(capture.index) == config.context_capture_ix
3753 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3754 {
3755 node_is_name = false;
3756 } else {
3757 if Some(capture.index) == config.open_capture_ix {
3758 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3759 } else if Some(capture.index) == config.close_capture_ix {
3760 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3761 }
3762
3763 continue;
3764 }
3765
3766 let mut range = capture.node.start_byte()..capture.node.end_byte();
3767 let start = capture.node.start_position();
3768 if capture.node.end_position().row > start.row {
3769 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3770 }
3771
3772 if !range.is_empty() {
3773 buffer_ranges.push((range, node_is_name));
3774 }
3775 }
3776 if buffer_ranges.is_empty() {
3777 return None;
3778 }
3779 let mut text = String::new();
3780 let mut highlight_ranges = Vec::new();
3781 let mut name_ranges = Vec::new();
3782 let mut chunks = self.chunks(
3783 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3784 true,
3785 );
3786 let mut last_buffer_range_end = 0;
3787
3788 for (buffer_range, is_name) in buffer_ranges {
3789 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3790 if space_added {
3791 text.push(' ');
3792 }
3793 let before_append_len = text.len();
3794 let mut offset = buffer_range.start;
3795 chunks.seek(buffer_range.clone());
3796 for mut chunk in chunks.by_ref() {
3797 if chunk.text.len() > buffer_range.end - offset {
3798 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3799 offset = buffer_range.end;
3800 } else {
3801 offset += chunk.text.len();
3802 }
3803 let style = chunk
3804 .syntax_highlight_id
3805 .zip(theme)
3806 .and_then(|(highlight, theme)| highlight.style(theme));
3807 if let Some(style) = style {
3808 let start = text.len();
3809 let end = start + chunk.text.len();
3810 highlight_ranges.push((start..end, style));
3811 }
3812 text.push_str(chunk.text);
3813 if offset >= buffer_range.end {
3814 break;
3815 }
3816 }
3817 if is_name {
3818 let after_append_len = text.len();
3819 let start = if space_added && !name_ranges.is_empty() {
3820 before_append_len - 1
3821 } else {
3822 before_append_len
3823 };
3824 name_ranges.push(start..after_append_len);
3825 }
3826 last_buffer_range_end = buffer_range.end;
3827 }
3828
3829 Some(OutlineItem {
3830 depth: 0, // We'll calculate the depth later
3831 range: item_point_range,
3832 text,
3833 highlight_ranges,
3834 name_ranges,
3835 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3836 annotation_range: None,
3837 })
3838 }
3839
3840 pub fn function_body_fold_ranges<T: ToOffset>(
3841 &self,
3842 within: Range<T>,
3843 ) -> impl Iterator<Item = Range<usize>> + '_ {
3844 self.text_object_ranges(within, TreeSitterOptions::default())
3845 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3846 }
3847
3848 /// For each grammar in the language, runs the provided
3849 /// [`tree_sitter::Query`] against the given range.
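    ///
    /// A minimal sketch that runs each grammar's outline query, assuming a
    /// `snapshot` binding (illustrative only):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _ = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```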
3850 pub fn matches(
3851 &self,
3852 range: Range<usize>,
3853 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3854 ) -> SyntaxMapMatches<'_> {
3855 self.syntax.matches(range, self, query)
3856 }
3857
3858 pub fn all_bracket_ranges(
3859 &self,
3860 range: Range<usize>,
3861 ) -> impl Iterator<Item = BracketMatch> + '_ {
3862 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3863 grammar.brackets_config.as_ref().map(|c| &c.query)
3864 });
3865 let configs = matches
3866 .grammars()
3867 .iter()
3868 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3869 .collect::<Vec<_>>();
3870
3871 iter::from_fn(move || {
3872 while let Some(mat) = matches.peek() {
3873 let mut open = None;
3874 let mut close = None;
3875 let config = &configs[mat.grammar_index];
3876 let pattern = &config.patterns[mat.pattern_index];
3877 for capture in mat.captures {
3878 if capture.index == config.open_capture_ix {
3879 open = Some(capture.node.byte_range());
3880 } else if capture.index == config.close_capture_ix {
3881 close = Some(capture.node.byte_range());
3882 }
3883 }
3884
3885 matches.advance();
3886
3887 let Some((open_range, close_range)) = open.zip(close) else {
3888 continue;
3889 };
3890
3891 let bracket_range = open_range.start..=close_range.end;
3892 if !bracket_range.overlaps(&range) {
3893 continue;
3894 }
3895
3896 return Some(BracketMatch {
3897 open_range,
3898 close_range,
3899 newline_only: pattern.newline_only,
3900 });
3901 }
3902 None
3903 })
3904 }
3905
    /// Returns bracket range pairs overlapping or adjacent to `range`.
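    ///
    /// A minimal sketch, assuming a `cursor_offset` binding (illustrative
    /// only):
    ///
    /// ```ignore
    /// // Bracket pairs around an empty range at the cursor.
    /// for pair in snapshot.bracket_ranges(cursor_offset..cursor_offset) {
    ///     // Byte ranges of the open and close tokens themselves.
    ///     let _ = (&pair.open_range, &pair.close_range);
    /// }
    /// ```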
3907 pub fn bracket_ranges<T: ToOffset>(
3908 &self,
3909 range: Range<T>,
3910 ) -> impl Iterator<Item = BracketMatch> + '_ {
3911 // Find bracket pairs that *inclusively* contain the given range.
3912 let range = range.start.to_offset(self).saturating_sub(1)
3913 ..self.len().min(range.end.to_offset(self) + 1);
3914 self.all_bracket_ranges(range)
3915 .filter(|pair| !pair.newline_only)
3916 }
3917
3918 pub fn debug_variables_query<T: ToOffset>(
3919 &self,
3920 range: Range<T>,
3921 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3922 let range = range.start.to_offset(self).saturating_sub(1)
3923 ..self.len().min(range.end.to_offset(self) + 1);
3924
3925 let mut matches = self.syntax.matches_with_options(
3926 range.clone(),
3927 &self.text,
3928 TreeSitterOptions::default(),
3929 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3930 );
3931
3932 let configs = matches
3933 .grammars()
3934 .iter()
3935 .map(|grammar| grammar.debug_variables_config.as_ref())
3936 .collect::<Vec<_>>();
3937
3938 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3939
3940 iter::from_fn(move || {
3941 loop {
3942 while let Some(capture) = captures.pop() {
3943 if capture.0.overlaps(&range) {
3944 return Some(capture);
3945 }
3946 }
3947
3948 let mat = matches.peek()?;
3949
3950 let Some(config) = configs[mat.grammar_index].as_ref() else {
3951 matches.advance();
3952 continue;
3953 };
3954
3955 for capture in mat.captures {
3956 let Some(ix) = config
3957 .objects_by_capture_ix
3958 .binary_search_by_key(&capture.index, |e| e.0)
3959 .ok()
3960 else {
3961 continue;
3962 };
3963 let text_object = config.objects_by_capture_ix[ix].1;
3964 let byte_range = capture.node.byte_range();
3965
3966 let mut found = false;
3967 for (range, existing) in captures.iter_mut() {
3968 if existing == &text_object {
3969 range.start = range.start.min(byte_range.start);
3970 range.end = range.end.max(byte_range.end);
3971 found = true;
3972 break;
3973 }
3974 }
3975
3976 if !found {
3977 captures.push((byte_range, text_object));
3978 }
3979 }
3980
3981 matches.advance();
3982 }
3983 })
3984 }
3985
3986 pub fn text_object_ranges<T: ToOffset>(
3987 &self,
3988 range: Range<T>,
3989 options: TreeSitterOptions,
3990 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3991 let range = range.start.to_offset(self).saturating_sub(1)
3992 ..self.len().min(range.end.to_offset(self) + 1);
3993
3994 let mut matches =
3995 self.syntax
3996 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3997 grammar.text_object_config.as_ref().map(|c| &c.query)
3998 });
3999
4000 let configs = matches
4001 .grammars()
4002 .iter()
4003 .map(|grammar| grammar.text_object_config.as_ref())
4004 .collect::<Vec<_>>();
4005
4006 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4007
4008 iter::from_fn(move || {
4009 loop {
4010 while let Some(capture) = captures.pop() {
4011 if capture.0.overlaps(&range) {
4012 return Some(capture);
4013 }
4014 }
4015
4016 let mat = matches.peek()?;
4017
4018 let Some(config) = configs[mat.grammar_index].as_ref() else {
4019 matches.advance();
4020 continue;
4021 };
4022
4023 for capture in mat.captures {
4024 let Some(ix) = config
4025 .text_objects_by_capture_ix
4026 .binary_search_by_key(&capture.index, |e| e.0)
4027 .ok()
4028 else {
4029 continue;
4030 };
4031 let text_object = config.text_objects_by_capture_ix[ix].1;
4032 let byte_range = capture.node.byte_range();
4033
4034 let mut found = false;
4035 for (range, existing) in captures.iter_mut() {
4036 if existing == &text_object {
4037 range.start = range.start.min(byte_range.start);
4038 range.end = range.end.max(byte_range.end);
4039 found = true;
4040 break;
4041 }
4042 }
4043
4044 if !found {
4045 captures.push((byte_range, text_object));
4046 }
4047 }
4048
4049 matches.advance();
4050 }
4051 })
4052 }
4053
    /// Returns enclosing bracket ranges containing the given range.
4055 pub fn enclosing_bracket_ranges<T: ToOffset>(
4056 &self,
4057 range: Range<T>,
4058 ) -> impl Iterator<Item = BracketMatch> + '_ {
4059 let range = range.start.to_offset(self)..range.end.to_offset(self);
4060
4061 self.bracket_ranges(range.clone()).filter(move |pair| {
4062 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4063 })
4064 }
4065
    /// Returns the smallest pair of enclosing bracket ranges containing the
    /// given range, or `None` if no brackets enclose it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket
    /// ranges are considered.
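    ///
    /// A minimal sketch, assuming a `selection_range` binding (illustrative
    /// only):
    ///
    /// ```ignore
    /// if let Some((open_range, close_range)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection_range, None)
    /// {
    ///     // Byte ranges of the open and close bracket tokens themselves.
    ///     let _ = (open_range, close_range);
    /// }
    /// ```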
4069 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4070 &self,
4071 range: Range<T>,
4072 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4073 ) -> Option<(Range<usize>, Range<usize>)> {
4074 let range = range.start.to_offset(self)..range.end.to_offset(self);
4075
4076 // Get the ranges of the innermost pair of brackets.
4077 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4078
4079 for pair in self.enclosing_bracket_ranges(range.clone()) {
4080 if let Some(range_filter) = range_filter
4081 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4082 {
4083 continue;
4084 }
4085
4086 let len = pair.close_range.end - pair.open_range.start;
4087
4088 if let Some((existing_open, existing_close)) = &result {
4089 let existing_len = existing_close.end - existing_open.start;
4090 if len > existing_len {
4091 continue;
4092 }
4093 }
4094
4095 result = Some((pair.open_range, pair.close_range));
4096 }
4097
4098 result
4099 }
4100
4101 /// Returns anchor ranges for any matches of the redaction query.
4102 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4103 /// will be run on the relevant section of the buffer.
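    ///
    /// A minimal sketch, assuming a `visible_range` binding (illustrative
    /// only):
    ///
    /// ```ignore
    /// // Byte ranges to render as `•••` instead of their literal text.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(visible_range).collect();
    /// ```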
4104 pub fn redacted_ranges<T: ToOffset>(
4105 &self,
4106 range: Range<T>,
4107 ) -> impl Iterator<Item = Range<usize>> + '_ {
4108 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4109 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4110 grammar
4111 .redactions_config
4112 .as_ref()
4113 .map(|config| &config.query)
4114 });
4115
4116 let configs = syntax_matches
4117 .grammars()
4118 .iter()
4119 .map(|grammar| grammar.redactions_config.as_ref())
4120 .collect::<Vec<_>>();
4121
4122 iter::from_fn(move || {
4123 let redacted_range = syntax_matches
4124 .peek()
4125 .and_then(|mat| {
4126 configs[mat.grammar_index].and_then(|config| {
4127 mat.captures
4128 .iter()
4129 .find(|capture| capture.index == config.redaction_capture_ix)
4130 })
4131 })
4132 .map(|mat| mat.node.byte_range());
4133 syntax_matches.advance();
4134 redacted_range
4135 })
4136 }
4137
4138 pub fn injections_intersecting_range<T: ToOffset>(
4139 &self,
4140 range: Range<T>,
4141 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4142 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4143
4144 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4145 grammar
4146 .injection_config
4147 .as_ref()
4148 .map(|config| &config.query)
4149 });
4150
4151 let configs = syntax_matches
4152 .grammars()
4153 .iter()
4154 .map(|grammar| grammar.injection_config.as_ref())
4155 .collect::<Vec<_>>();
4156
4157 iter::from_fn(move || {
4158 let ranges = syntax_matches.peek().and_then(|mat| {
4159 let config = &configs[mat.grammar_index]?;
4160 let content_capture_range = mat.captures.iter().find_map(|capture| {
4161 if capture.index == config.content_capture_ix {
4162 Some(capture.node.byte_range())
4163 } else {
4164 None
4165 }
4166 })?;
4167 let language = self.language_at(content_capture_range.start)?;
4168 Some((content_capture_range, language))
4169 });
4170 syntax_matches.advance();
4171 ranges
4172 })
4173 }
4174
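    /// Returns the runnables captured by each grammar's runnables query that
    /// intersect the given byte range.
    ///
    /// A minimal sketch of iterating them, assuming a `snapshot` binding
    /// (illustrative only):
    ///
    /// ```ignore
    /// for runnable in snapshot.runnable_ranges(0..snapshot.len()) {
    ///     // `run_range` is where a run indicator could be displayed, and
    ///     // `tags` identifies which task templates the runnable matches.
    ///     let _ = (&runnable.run_range, &runnable.runnable.tags);
    /// }
    /// ```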
4175 pub fn runnable_ranges(
4176 &self,
4177 offset_range: Range<usize>,
4178 ) -> impl Iterator<Item = RunnableRange> + '_ {
4179 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4180 grammar.runnable_config.as_ref().map(|config| &config.query)
4181 });
4182
4183 let test_configs = syntax_matches
4184 .grammars()
4185 .iter()
4186 .map(|grammar| grammar.runnable_config.as_ref())
4187 .collect::<Vec<_>>();
4188
4189 iter::from_fn(move || {
4190 loop {
4191 let mat = syntax_matches.peek()?;
4192
4193 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4194 let mut run_range = None;
4195 let full_range = mat.captures.iter().fold(
4196 Range {
4197 start: usize::MAX,
4198 end: 0,
4199 },
4200 |mut acc, next| {
4201 let byte_range = next.node.byte_range();
4202 if acc.start > byte_range.start {
4203 acc.start = byte_range.start;
4204 }
4205 if acc.end < byte_range.end {
4206 acc.end = byte_range.end;
4207 }
4208 acc
4209 },
4210 );
4211 if full_range.start > full_range.end {
4212 // We did not find a full spanning range of this match.
4213 return None;
4214 }
4215 let extra_captures: SmallVec<[_; 1]> =
4216 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4217 test_configs
4218 .extra_captures
4219 .get(capture.index as usize)
4220 .cloned()
4221 .and_then(|tag_name| match tag_name {
4222 RunnableCapture::Named(name) => {
4223 Some((capture.node.byte_range(), name))
4224 }
4225 RunnableCapture::Run => {
4226 let _ = run_range.insert(capture.node.byte_range());
4227 None
4228 }
4229 })
4230 }));
4231 let run_range = run_range?;
4232 let tags = test_configs
4233 .query
4234 .property_settings(mat.pattern_index)
4235 .iter()
4236 .filter_map(|property| {
4237 if *property.key == *"tag" {
4238 property
4239 .value
4240 .as_ref()
4241 .map(|value| RunnableTag(value.to_string().into()))
4242 } else {
4243 None
4244 }
4245 })
4246 .collect();
4247 let extra_captures = extra_captures
4248 .into_iter()
4249 .map(|(range, name)| {
4250 (
4251 name.to_string(),
4252 self.text_for_range(range.clone()).collect::<String>(),
4253 )
4254 })
4255 .collect();
4256 // All tags should have the same range.
4257 Some(RunnableRange {
4258 run_range,
4259 full_range,
4260 runnable: Runnable {
4261 tags,
4262 language: mat.language,
4263 buffer: self.remote_id(),
4264 },
4265 extra_captures,
4266 buffer_id: self.remote_id(),
4267 })
4268 });
4269
4270 syntax_matches.advance();
4271 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? returns None. But a match whose
                    // captures lack a run marker shouldn't end this iterator, so in that case we
                    // loop around and try the next match.
4274 return test_range;
4275 }
4276 }
4277 })
4278 }
4279
4280 /// Returns selections for remote peers intersecting the given range.
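    ///
    /// A minimal sketch, assuming `range` is a `Range<Anchor>` covering the
    /// region of interest (the binding is illustrative only):
    ///
    /// ```ignore
    /// for (_replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(range, false)
    /// {
    ///     for selection in selections {
    ///         // Resolve the anchors to byte offsets for rendering.
    ///         let _ = selection.start.to_offset(&snapshot)..selection.end.to_offset(&snapshot);
    ///     }
    /// }
    /// ```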
4281 #[allow(clippy::type_complexity)]
4282 pub fn selections_in_range(
4283 &self,
4284 range: Range<Anchor>,
4285 include_local: bool,
4286 ) -> impl Iterator<
4287 Item = (
4288 ReplicaId,
4289 bool,
4290 CursorShape,
4291 impl Iterator<Item = &Selection<Anchor>> + '_,
4292 ),
4293 > + '_ {
4294 self.remote_selections
4295 .iter()
4296 .filter(move |(replica_id, set)| {
4297 (include_local || **replica_id != self.text.replica_id())
4298 && !set.selections.is_empty()
4299 })
4300 .map(move |(replica_id, set)| {
4301 let start_ix = match set.selections.binary_search_by(|probe| {
4302 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4303 }) {
4304 Ok(ix) | Err(ix) => ix,
4305 };
4306 let end_ix = match set.selections.binary_search_by(|probe| {
4307 probe.start.cmp(&range.end, self).then(Ordering::Less)
4308 }) {
4309 Ok(ix) | Err(ix) => ix,
4310 };
4311
4312 (
4313 *replica_id,
4314 set.line_mode,
4315 set.cursor_shape,
4316 set.selections[start_ix..end_ix].iter(),
4317 )
4318 })
4319 }
4320
    /// Returns whether the buffer contains any diagnostics.
4322 pub fn has_diagnostics(&self) -> bool {
4323 !self.diagnostics.is_empty()
4324 }
4325
4326 /// Returns all the diagnostics intersecting the given range.
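    ///
    /// A minimal sketch that resolves entries to byte offsets and keeps only
    /// errors, assuming a `range` binding (illustrative only):
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(range, false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```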
4327 pub fn diagnostics_in_range<'a, T, O>(
4328 &'a self,
4329 search_range: Range<T>,
4330 reversed: bool,
4331 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4332 where
4333 T: 'a + Clone + ToOffset,
4334 O: 'a + FromAnchor,
4335 {
4336 let mut iterators: Vec<_> = self
4337 .diagnostics
4338 .iter()
4339 .map(|(_, collection)| {
4340 collection
4341 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4342 .peekable()
4343 })
4344 .collect();
4345
4346 std::iter::from_fn(move || {
4347 let (next_ix, _) = iterators
4348 .iter_mut()
4349 .enumerate()
4350 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4351 .min_by(|(_, a), (_, b)| {
4352 let cmp = a
4353 .range
4354 .start
4355 .cmp(&b.range.start, self)
4356 // when range is equal, sort by diagnostic severity
4357 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4358 // and stabilize order with group_id
4359 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4360 if reversed { cmp.reverse() } else { cmp }
4361 })?;
4362 iterators[next_ix]
4363 .next()
4364 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4365 diagnostic,
4366 range: FromAnchor::from_anchor(&range.start, self)
4367 ..FromAnchor::from_anchor(&range.end, self),
4368 })
4369 })
4370 }
4371
4372 /// Returns all the diagnostic groups associated with the given
4373 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4375 pub fn diagnostic_groups(
4376 &self,
4377 language_server_id: Option<LanguageServerId>,
4378 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4379 let mut groups = Vec::new();
4380
4381 if let Some(language_server_id) = language_server_id {
4382 if let Ok(ix) = self
4383 .diagnostics
4384 .binary_search_by_key(&language_server_id, |e| e.0)
4385 {
4386 self.diagnostics[ix]
4387 .1
4388 .groups(language_server_id, &mut groups, self);
4389 }
4390 } else {
4391 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4392 diagnostics.groups(*language_server_id, &mut groups, self);
4393 }
4394 }
4395
4396 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4397 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4398 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4399 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4400 });
4401
4402 groups
4403 }
4404
4405 /// Returns an iterator over the diagnostics for the given group.
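    ///
    /// A minimal sketch, assuming `group_id` was taken from a diagnostic's
    /// `group_id` field (illustrative only):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostic_group::<usize>(group_id) {
    ///     // `entry.range` is now a byte-offset range.
    ///     let _ = (entry.range.clone(), &entry.diagnostic.message);
    /// }
    /// ```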
4406 pub fn diagnostic_group<O>(
4407 &self,
4408 group_id: usize,
4409 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4410 where
4411 O: FromAnchor + 'static,
4412 {
4413 self.diagnostics
4414 .iter()
4415 .flat_map(move |(_, set)| set.group(group_id, self))
4416 }
4417
4418 /// An integer version number that accounts for all updates besides
4419 /// the buffer's text itself (which is versioned via a version vector).
4420 pub fn non_text_state_update_count(&self) -> usize {
4421 self.non_text_state_update_count
4422 }
4423
4424 /// An integer version that changes when the buffer's syntax changes.
4425 pub fn syntax_update_count(&self) -> usize {
4426 self.syntax.update_count()
4427 }
4428
    /// Returns a snapshot of the underlying file.
4430 pub fn file(&self) -> Option<&Arc<dyn File>> {
4431 self.file.as_ref()
4432 }
4433
    /// Resolves the path of the underlying file, relative to its worktree root,
    /// or including the worktree root's name when `include_root` is true.
4435 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4436 if let Some(file) = self.file() {
4437 if file.path().file_name().is_none() || include_root {
4438 Some(file.full_path(cx))
4439 } else {
4440 Some(file.path().to_path_buf())
4441 }
4442 } else {
4443 None
4444 }
4445 }
4446
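    /// Returns the distinct words found in `query.range`, each mapped to an
    /// anchor range where it occurs.
    ///
    /// A minimal sketch, assuming only a `snapshot` binding (illustrative
    /// only):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// // Every key contains the letters `c`, `f`, `g` in order.
    /// ```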
4447 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4448 let query_str = query.fuzzy_contents;
4449 if query_str.is_some_and(|query| query.is_empty()) {
4450 return BTreeMap::default();
4451 }
4452
4453 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4454 language,
4455 override_id: None,
4456 }));
4457
4458 let mut query_ix = 0;
4459 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4460 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4461
4462 let mut words = BTreeMap::default();
4463 let mut current_word_start_ix = None;
4464 let mut chunk_ix = query.range.start;
4465 for chunk in self.chunks(query.range, false) {
4466 for (i, c) in chunk.text.char_indices() {
4467 let ix = chunk_ix + i;
4468 if classifier.is_word(c) {
4469 if current_word_start_ix.is_none() {
4470 current_word_start_ix = Some(ix);
4471 }
4472
4473 if let Some(query_chars) = &query_chars
4474 && query_ix < query_len
4475 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4476 {
4477 query_ix += 1;
4478 }
4479 continue;
4480 } else if let Some(word_start) = current_word_start_ix.take()
4481 && query_ix == query_len
4482 {
4483 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4484 let mut word_text = self.text_for_range(word_start..ix).peekable();
4485 let first_char = word_text
4486 .peek()
4487 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and
                    // "words" that start with a digit.
4489 if !query.skip_digits
4490 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4491 {
4492 words.insert(word_text.collect(), word_range);
4493 }
4494 }
4495 query_ix = 0;
4496 }
4497 chunk_ix += chunk.text.len();
4498 }
4499
4500 words
4501 }
4502}
4503
4504pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of the fuzzy string, in order.
4506 pub fuzzy_contents: Option<&'a str>,
4507 /// Skips words that start with a digit.
4508 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4510 pub range: Range<usize>,
4511}
4512
4513fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4514 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4515}
4516
4517fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4518 let mut result = IndentSize::spaces(0);
4519 for c in text {
4520 let kind = match c {
4521 ' ' => IndentKind::Space,
4522 '\t' => IndentKind::Tab,
4523 _ => break,
4524 };
4525 if result.len == 0 {
4526 result.kind = kind;
4527 }
4528 result.len += 1;
4529 }
4530 result
4531}
4532
4533impl Clone for BufferSnapshot {
4534 fn clone(&self) -> Self {
4535 Self {
4536 text: self.text.clone(),
4537 syntax: self.syntax.clone(),
4538 file: self.file.clone(),
4539 remote_selections: self.remote_selections.clone(),
4540 diagnostics: self.diagnostics.clone(),
4541 language: self.language.clone(),
4542 non_text_state_update_count: self.non_text_state_update_count,
4543 }
4544 }
4545}
4546
4547impl Deref for BufferSnapshot {
4548 type Target = text::BufferSnapshot;
4549
4550 fn deref(&self) -> &Self::Target {
4551 &self.text
4552 }
4553}
4554
4555unsafe impl Send for BufferChunks<'_> {}
4556
4557impl<'a> BufferChunks<'a> {
4558 pub(crate) fn new(
4559 text: &'a Rope,
4560 range: Range<usize>,
4561 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4562 diagnostics: bool,
4563 buffer_snapshot: Option<&'a BufferSnapshot>,
4564 ) -> Self {
4565 let mut highlights = None;
4566 if let Some((captures, highlight_maps)) = syntax {
4567 highlights = Some(BufferChunkHighlights {
4568 captures,
4569 next_capture: None,
4570 stack: Default::default(),
4571 highlight_maps,
4572 })
4573 }
4574
4575 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4576 let chunks = text.chunks_in_range(range.clone());
4577
4578 let mut this = BufferChunks {
4579 range,
4580 buffer_snapshot,
4581 chunks,
4582 diagnostic_endpoints,
4583 error_depth: 0,
4584 warning_depth: 0,
4585 information_depth: 0,
4586 hint_depth: 0,
4587 unnecessary_depth: 0,
4588 underline: true,
4589 highlights,
4590 };
4591 this.initialize_diagnostic_endpoints();
4592 this
4593 }
4594
    /// Seeks to the given byte range in the buffer.
4596 pub fn seek(&mut self, range: Range<usize>) {
4597 let old_range = std::mem::replace(&mut self.range, range.clone());
4598 self.chunks.set_range(self.range.clone());
4599 if let Some(highlights) = self.highlights.as_mut() {
4600 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4601 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4602 highlights
4603 .stack
4604 .retain(|(end_offset, _)| *end_offset > range.start);
4605 if let Some(capture) = &highlights.next_capture
4606 && range.start >= capture.node.start_byte()
4607 {
4608 let next_capture_end = capture.node.end_byte();
4609 if range.start < next_capture_end {
4610 highlights.stack.push((
4611 next_capture_end,
4612 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4613 ));
4614 }
4615 highlights.next_capture.take();
4616 }
4617 } else if let Some(snapshot) = self.buffer_snapshot {
4618 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4619 *highlights = BufferChunkHighlights {
4620 captures,
4621 next_capture: None,
4622 stack: Default::default(),
4623 highlight_maps,
4624 };
4625 } else {
4626 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4627 // Seeking such BufferChunks is not supported.
4628 debug_assert!(
4629 false,
4630 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4631 );
4632 }
4633
4634 highlights.captures.set_byte_range(self.range.clone());
4635 self.initialize_diagnostic_endpoints();
4636 }
4637 }
4638
4639 fn initialize_diagnostic_endpoints(&mut self) {
4640 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4641 && let Some(buffer) = self.buffer_snapshot
4642 {
4643 let mut diagnostic_endpoints = Vec::new();
4644 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4645 diagnostic_endpoints.push(DiagnosticEndpoint {
4646 offset: entry.range.start,
4647 is_start: true,
4648 severity: entry.diagnostic.severity,
4649 is_unnecessary: entry.diagnostic.is_unnecessary,
4650 underline: entry.diagnostic.underline,
4651 });
4652 diagnostic_endpoints.push(DiagnosticEndpoint {
4653 offset: entry.range.end,
4654 is_start: false,
4655 severity: entry.diagnostic.severity,
4656 is_unnecessary: entry.diagnostic.is_unnecessary,
4657 underline: entry.diagnostic.underline,
4658 });
4659 }
4660 diagnostic_endpoints
4661 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4662 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4663 self.hint_depth = 0;
4664 self.error_depth = 0;
4665 self.warning_depth = 0;
4666 self.information_depth = 0;
4667 }
4668 }
4669
4670 /// The current byte offset in the buffer.
4671 pub fn offset(&self) -> usize {
4672 self.range.start
4673 }
4674
4675 pub fn range(&self) -> Range<usize> {
4676 self.range.clone()
4677 }
4678
4679 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4680 let depth = match endpoint.severity {
4681 DiagnosticSeverity::ERROR => &mut self.error_depth,
4682 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4683 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4684 DiagnosticSeverity::HINT => &mut self.hint_depth,
4685 _ => return,
4686 };
4687 if endpoint.is_start {
4688 *depth += 1;
4689 } else {
4690 *depth -= 1;
4691 }
4692
4693 if endpoint.is_unnecessary {
4694 if endpoint.is_start {
4695 self.unnecessary_depth += 1;
4696 } else {
4697 self.unnecessary_depth -= 1;
4698 }
4699 }
4700 }
4701
4702 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4703 if self.error_depth > 0 {
4704 Some(DiagnosticSeverity::ERROR)
4705 } else if self.warning_depth > 0 {
4706 Some(DiagnosticSeverity::WARNING)
4707 } else if self.information_depth > 0 {
4708 Some(DiagnosticSeverity::INFORMATION)
4709 } else if self.hint_depth > 0 {
4710 Some(DiagnosticSeverity::HINT)
4711 } else {
4712 None
4713 }
4714 }
4715
4716 fn current_code_is_unnecessary(&self) -> bool {
4717 self.unnecessary_depth > 0
4718 }
4719}
4720
4721impl<'a> Iterator for BufferChunks<'a> {
4722 type Item = Chunk<'a>;
4723
4724 fn next(&mut self) -> Option<Self::Item> {
4725 let mut next_capture_start = usize::MAX;
4726 let mut next_diagnostic_endpoint = usize::MAX;
4727
4728 if let Some(highlights) = self.highlights.as_mut() {
4729 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4730 if *parent_capture_end <= self.range.start {
4731 highlights.stack.pop();
4732 } else {
4733 break;
4734 }
4735 }
4736
4737 if highlights.next_capture.is_none() {
4738 highlights.next_capture = highlights.captures.next();
4739 }
4740
4741 while let Some(capture) = highlights.next_capture.as_ref() {
4742 if self.range.start < capture.node.start_byte() {
4743 next_capture_start = capture.node.start_byte();
4744 break;
4745 } else {
4746 let highlight_id =
4747 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4748 highlights
4749 .stack
4750 .push((capture.node.end_byte(), highlight_id));
4751 highlights.next_capture = highlights.captures.next();
4752 }
4753 }
4754 }
4755
4756 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4757 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4758 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4759 if endpoint.offset <= self.range.start {
4760 self.update_diagnostic_depths(endpoint);
4761 diagnostic_endpoints.next();
4762 self.underline = endpoint.underline;
4763 } else {
4764 next_diagnostic_endpoint = endpoint.offset;
4765 break;
4766 }
4767 }
4768 }
4769 self.diagnostic_endpoints = diagnostic_endpoints;
4770
4771 if let Some(chunk) = self.chunks.peek() {
4772 let chunk_start = self.range.start;
4773 let mut chunk_end = (self.chunks.offset() + chunk.len())
4774 .min(next_capture_start)
4775 .min(next_diagnostic_endpoint);
4776 let mut highlight_id = None;
4777 if let Some(highlights) = self.highlights.as_ref()
4778 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4779 {
4780 chunk_end = chunk_end.min(*parent_capture_end);
4781 highlight_id = Some(*parent_highlight_id);
4782 }
4783
4784 let slice =
4785 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4786 self.range.start = chunk_end;
4787 if self.range.start == self.chunks.offset() + chunk.len() {
4788 self.chunks.next().unwrap();
4789 }
4790
4791 Some(Chunk {
4792 text: slice,
4793 syntax_highlight_id: highlight_id,
4794 underline: self.underline,
4795 diagnostic_severity: self.current_diagnostic_severity(),
4796 is_unnecessary: self.current_code_is_unnecessary(),
4797 ..Chunk::default()
4798 })
4799 } else {
4800 None
4801 }
4802 }
4803}
4804
4805impl operation_queue::Operation for Operation {
4806 fn lamport_timestamp(&self) -> clock::Lamport {
4807 match self {
4808 Operation::Buffer(_) => {
4809 unreachable!("buffer operations should never be deferred at this layer")
4810 }
4811 Operation::UpdateDiagnostics {
4812 lamport_timestamp, ..
4813 }
4814 | Operation::UpdateSelections {
4815 lamport_timestamp, ..
4816 }
4817 | Operation::UpdateCompletionTriggers {
4818 lamport_timestamp, ..
4819 } => *lamport_timestamp,
4820 }
4821 }
4822}
4823
4824impl Default for Diagnostic {
4825 fn default() -> Self {
4826 Self {
4827 source: Default::default(),
4828 source_kind: DiagnosticSourceKind::Other,
4829 code: None,
4830 code_description: None,
4831 severity: DiagnosticSeverity::ERROR,
4832 message: Default::default(),
4833 markdown: None,
4834 group_id: 0,
4835 is_primary: false,
4836 is_disk_based: false,
4837 is_unnecessary: false,
4838 underline: true,
4839 data: None,
4840 }
4841 }
4842}
4843
4844impl IndentSize {
4845 /// Returns an [`IndentSize`] representing the given spaces.
4846 pub fn spaces(len: u32) -> Self {
4847 Self {
4848 len,
4849 kind: IndentKind::Space,
4850 }
4851 }
4852
4853 /// Returns an [`IndentSize`] representing a tab.
4854 pub fn tab() -> Self {
4855 Self {
4856 len: 1,
4857 kind: IndentKind::Tab,
4858 }
4859 }
4860
4861 /// An iterator over the characters represented by this [`IndentSize`].
4862 pub fn chars(&self) -> impl Iterator<Item = char> {
4863 iter::repeat(self.char()).take(self.len as usize)
4864 }
4865
4866 /// The character representation of this [`IndentSize`].
4867 pub fn char(&self) -> char {
4868 match self.kind {
4869 IndentKind::Space => ' ',
4870 IndentKind::Tab => '\t',
4871 }
4872 }
4873
4874 /// Consumes the current [`IndentSize`] and returns a new one that has
4875 /// been shrunk or enlarged by the given size along the given direction.
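    ///
    /// A small sketch of the grow/shrink behavior:
    ///
    /// ```ignore
    /// // Growing four spaces by four more yields eight spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// // Shrinking by a tab is a no-op, because the indent kinds differ.
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::tab());
    /// ```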
4876 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4877 match direction {
4878 Ordering::Less => {
4879 if self.kind == size.kind && self.len >= size.len {
4880 self.len -= size.len;
4881 }
4882 }
4883 Ordering::Equal => {}
4884 Ordering::Greater => {
4885 if self.len == 0 {
4886 self = size;
4887 } else if self.kind == size.kind {
4888 self.len += size.len;
4889 }
4890 }
4891 }
4892 self
4893 }
4894
4895 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4896 match self.kind {
4897 IndentKind::Space => self.len as usize,
4898 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4899 }
4900 }
4901}
4902
4903#[cfg(any(test, feature = "test-support"))]
4904pub struct TestFile {
4905 pub path: Arc<Path>,
4906 pub root_name: String,
4907 pub local_root: Option<PathBuf>,
4908}
4909
4910#[cfg(any(test, feature = "test-support"))]
4911impl File for TestFile {
4912 fn path(&self) -> &Arc<Path> {
4913 &self.path
4914 }
4915
4916 fn full_path(&self, _: &gpui::App) -> PathBuf {
4917 PathBuf::from(&self.root_name).join(self.path.as_ref())
4918 }
4919
4920 fn as_local(&self) -> Option<&dyn LocalFile> {
4921 if self.local_root.is_some() {
4922 Some(self)
4923 } else {
4924 None
4925 }
4926 }
4927
4928 fn disk_state(&self) -> DiskState {
4929 unimplemented!()
4930 }
4931
4932 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4933 self.path().file_name().unwrap_or(self.root_name.as_ref())
4934 }
4935
4936 fn worktree_id(&self, _: &App) -> WorktreeId {
4937 WorktreeId::from_usize(0)
4938 }
4939
4940 fn to_proto(&self, _: &App) -> rpc::proto::File {
4941 unimplemented!()
4942 }
4943
4944 fn is_private(&self) -> bool {
4945 false
4946 }
4947}
4948
4949#[cfg(any(test, feature = "test-support"))]
4950impl LocalFile for TestFile {
4951 fn abs_path(&self, _cx: &App) -> PathBuf {
4952 PathBuf::from(self.local_root.as_ref().unwrap())
4953 .join(&self.root_name)
4954 .join(self.path.as_ref())
4955 }
4956
4957 fn load(&self, _cx: &App) -> Task<Result<String>> {
4958 unimplemented!()
4959 }
4960
4961 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4962 unimplemented!()
4963 }
4964}
4965
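/// Groups consecutive `u32` values into half-open ranges, starting a new range
/// whenever a value is non-consecutive or the current range reaches `max_len`.
///
/// A small sketch of the grouping behavior:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7]);
/// ```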
4966pub(crate) fn contiguous_ranges(
4967 values: impl Iterator<Item = u32>,
4968 max_len: usize,
4969) -> impl Iterator<Item = Range<u32>> {
4970 let mut values = values;
4971 let mut current_range: Option<Range<u32>> = None;
4972 std::iter::from_fn(move || {
4973 loop {
4974 if let Some(value) = values.next() {
4975 if let Some(range) = &mut current_range
4976 && value == range.end
4977 && range.len() < max_len
4978 {
4979 range.end += 1;
4980 continue;
4981 }
4982
4983 let prev_range = current_range.clone();
4984 current_range = Some(value..(value + 1));
4985 if prev_range.is_some() {
4986 return prev_range;
4987 }
4988 } else {
4989 return current_range.take();
4990 }
4991 }
4992 })
4993}
4994
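/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-specific word characters from a
/// [`LanguageScope`] into account.
///
/// A minimal sketch of the builder-style configuration, with no language
/// scope attached:
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Word); // punctuation is ignored
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```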
4995#[derive(Default, Debug)]
4996pub struct CharClassifier {
4997 scope: Option<LanguageScope>,
4998 for_completion: bool,
4999 ignore_punctuation: bool,
5000}
5001
5002impl CharClassifier {
5003 pub fn new(scope: Option<LanguageScope>) -> Self {
5004 Self {
5005 scope,
5006 for_completion: false,
5007 ignore_punctuation: false,
5008 }
5009 }
5010
5011 pub fn for_completion(self, for_completion: bool) -> Self {
5012 Self {
5013 for_completion,
5014 ..self
5015 }
5016 }
5017
5018 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5019 Self {
5020 ignore_punctuation,
5021 ..self
5022 }
5023 }
5024
5025 pub fn is_whitespace(&self, c: char) -> bool {
5026 self.kind(c) == CharKind::Whitespace
5027 }
5028
5029 pub fn is_word(&self, c: char) -> bool {
5030 self.kind(c) == CharKind::Word
5031 }
5032
5033 pub fn is_punctuation(&self, c: char) -> bool {
5034 self.kind(c) == CharKind::Punctuation
5035 }
5036
5037 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5038 if c.is_alphanumeric() || c == '_' {
5039 return CharKind::Word;
5040 }
5041
5042 if let Some(scope) = &self.scope {
5043 let characters = if self.for_completion {
5044 scope.completion_query_characters()
5045 } else {
5046 scope.word_characters()
5047 };
5048 if let Some(characters) = characters
5049 && characters.contains(&c)
5050 {
5051 return CharKind::Word;
5052 }
5053 }
5054
5055 if c.is_whitespace() {
5056 return CharKind::Whitespace;
5057 }
5058
5059 if ignore_punctuation {
5060 CharKind::Word
5061 } else {
5062 CharKind::Punctuation
5063 }
5064 }
5065
5066 pub fn kind(&self, c: char) -> CharKind {
5067 self.kind_with(c, self.ignore_punctuation)
5068 }
5069}
5070
5071/// Find all of the ranges of whitespace that occur at the ends of lines
5072/// in the given rope.
5073///
5074/// This could also be done with a regex search, but this implementation
5075/// avoids copying text.
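///
/// A small sketch of the expected output (assuming `Rope` can be built from a
/// string slice, as elsewhere in this crate):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // The trailing spaces on line one and the trailing tab on line two.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```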
5076pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5077 let mut ranges = Vec::new();
5078
5079 let mut offset = 0;
5080 let mut prev_chunk_trailing_whitespace_range = 0..0;
5081 for chunk in rope.chunks() {
5082 let mut prev_line_trailing_whitespace_range = 0..0;
5083 for (i, line) in chunk.split('\n').enumerate() {
5084 let line_end_offset = offset + line.len();
5085 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5086 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5087
5088 if i == 0 && trimmed_line_len == 0 {
5089 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5090 }
5091 if !prev_line_trailing_whitespace_range.is_empty() {
5092 ranges.push(prev_line_trailing_whitespace_range);
5093 }
5094
5095 offset = line_end_offset + 1;
5096 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5097 }
5098
5099 offset -= 1;
5100 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5101 }
5102
5103 if !prev_chunk_trailing_whitespace_range.is_empty() {
5104 ranges.push(prev_chunk_trailing_whitespace_range);
5105 }
5106
5107 ranges
5108}