1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
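/// For example, a line indented with four spaces corresponds to
/// `IndentSize { len: 4, kind: IndentKind::Space }`.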
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if any.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Allows diagnostics to be quickly grouped by the kind of source that produced them.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. This is passed back to
    /// the server when code actions are requested for this diagnostic.
    pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer needs to be reloaded.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
    /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// is shifted by the same amount, `b - a` columns.
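        ///
        /// For example, if a block copied at column 4 (`a = 4`) has its first line
        /// auto-indented to column 8 (`b = 8`), a line that was originally indented
        /// to column 8 ends up at column 12.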
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text is part of an inlay.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region that could be resolved into a task
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
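        // Re-map each highlight range into the preview's coordinate space and clamp it
        // to the first line, dropping any ranges that become empty.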
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
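            // Each edit contributes up to three runs: the unchanged text since the previous
            // edit, the deleted text from the current snapshot (when requested), and the
            // newly inserted text from the preview snapshot.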
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
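        // Serialize the text operations on a background thread, skipping any operation
        // that the given `since` version has already observed.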
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
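                // Bring the syntax snapshot up to date with the pre-edit text, apply the
                // edits to the branch buffer, and then reparse so the preview reflects the
                // edited text's syntax highlighting.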
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
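            // Skip past requested ranges that end before this edit; the edit is included
            // if any remaining range overlaps it.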
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation
1162 && let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
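        // If the base buffer echoes back an edit that was previously merged from this
        // branch, apply it so the operation is observed, then immediately undo it so the
        // branch's text is not duplicated.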
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1188 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1189 {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193
1194 self.apply_ops([operation.clone()], cx);
1195
1196 if let Some(timestamp) = operation_to_undo {
1197 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1198 self.undo_operations(counts, cx);
1199 }
1200 }
1201
1202 #[cfg(test)]
1203 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1204 &self.text
1205 }
1206
1207 /// Retrieve a snapshot of the buffer's raw text, without any
1208 /// language-related state like the syntax tree or diagnostics.
1209 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1210 self.text.snapshot()
1211 }
1212
1213 /// The file associated with the buffer, if any.
1214 pub fn file(&self) -> Option<&Arc<dyn File>> {
1215 self.file.as_ref()
1216 }
1217
1218 /// The version of the buffer that was last saved or reloaded from disk.
1219 pub fn saved_version(&self) -> &clock::Global {
1220 &self.saved_version
1221 }
1222
1223 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1224 pub fn saved_mtime(&self) -> Option<MTime> {
1225 self.saved_mtime
1226 }
1227
1228 /// Assign a language to the buffer.
1229 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1230 self.non_text_state_update_count += 1;
1231 self.syntax_map.lock().clear(&self.text);
1232 self.language = language;
1233 self.was_changed();
1234 self.reparse(cx);
1235 cx.emit(BufferEvent::LanguageChanged);
1236 }
1237
1238 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1239 /// other languages if parts of the buffer are written in different languages.
1240 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1241 self.syntax_map
1242 .lock()
1243 .set_language_registry(language_registry);
1244 }
1245
1246 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1247 self.syntax_map.lock().language_registry()
1248 }
1249
1250 /// Assign the buffer a new [`Capability`].
1251 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1252 self.capability = capability;
1253 cx.emit(BufferEvent::CapabilityChanged)
1254 }
1255
1256 /// This method is called to signal that the buffer has been saved.
1257 pub fn did_save(
1258 &mut self,
1259 version: clock::Global,
1260 mtime: Option<MTime>,
1261 cx: &mut Context<Self>,
1262 ) {
1263 self.saved_version = version;
1264 self.has_unsaved_edits
1265 .set((self.saved_version().clone(), false));
1266 self.has_conflict = false;
1267 self.saved_mtime = mtime;
1268 self.was_changed();
1269 cx.emit(BufferEvent::Saved);
1270 cx.notify();
1271 }
1272
1273 /// This method is called to signal that the buffer has been discarded.
1274 pub fn discarded(&self, cx: &mut Context<Self>) {
1275 cx.emit(BufferEvent::Discarded);
1276 cx.notify();
1277 }
1278
1279 /// Reloads the contents of the buffer from disk.
1280 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1281 let (tx, rx) = futures::channel::oneshot::channel();
1282 let prev_version = self.text.version();
1283 self.reload_task = Some(cx.spawn(async move |this, cx| {
1284 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1285 let file = this.file.as_ref()?.as_local()?;
1286
1287 Some((file.disk_state().mtime(), file.load(cx)))
1288 })?
1289 else {
1290 return Ok(());
1291 };
1292
1293 let new_text = new_text.await?;
1294 let diff = this
1295 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1296 .await;
1297 this.update(cx, |this, cx| {
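                // Apply the reloaded contents only if the buffer hasn't changed since the
                // diff was computed against it; otherwise keep the buffer's contents and
                // mark a conflict if there are differences left to reconcile.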
1298 if this.version() == diff.base_version {
1299 this.finalize_last_transaction();
1300 this.apply_diff(diff, cx);
1301 tx.send(this.finalize_last_transaction().cloned()).ok();
1302 this.has_conflict = false;
1303 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1304 } else {
1305 if !diff.edits.is_empty()
1306 || this
1307 .edits_since::<usize>(&diff.base_version)
1308 .next()
1309 .is_some()
1310 {
1311 this.has_conflict = true;
1312 }
1313
1314 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1315 }
1316
1317 this.reload_task.take();
1318 })
1319 }));
1320 rx
1321 }
1322
1323 /// This method is called to signal that the buffer has been reloaded.
1324 pub fn did_reload(
1325 &mut self,
1326 version: clock::Global,
1327 line_ending: LineEnding,
1328 mtime: Option<MTime>,
1329 cx: &mut Context<Self>,
1330 ) {
1331 self.saved_version = version;
1332 self.has_unsaved_edits
1333 .set((self.saved_version.clone(), false));
1334 self.text.set_line_ending(line_ending);
1335 self.saved_mtime = mtime;
1336 cx.emit(BufferEvent::Reloaded);
1337 cx.notify();
1338 }
1339
1340 /// Updates the [`File`] backing this buffer. This should be called when
1341 /// the file has changed or has been deleted.
1342 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1343 let was_dirty = self.is_dirty();
1344 let mut file_changed = false;
1345
1346 if let Some(old_file) = self.file.as_ref() {
1347 if new_file.path() != old_file.path() {
1348 file_changed = true;
1349 }
1350
1351 let old_state = old_file.disk_state();
1352 let new_state = new_file.disk_state();
1353 if old_state != new_state {
1354 file_changed = true;
1355 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1356 cx.emit(BufferEvent::ReloadNeeded)
1357 }
1358 }
1359 } else {
1360 file_changed = true;
1361 };
1362
1363 self.file = Some(new_file);
1364 if file_changed {
1365 self.was_changed();
1366 self.non_text_state_update_count += 1;
1367 if was_dirty != self.is_dirty() {
1368 cx.emit(BufferEvent::DirtyChanged);
1369 }
1370 cx.emit(BufferEvent::FileHandleChanged);
1371 cx.notify();
1372 }
1373 }
1374
1375 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1376 Some(self.branch_state.as_ref()?.base_buffer.clone())
1377 }
1378
1379 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1380 pub fn language(&self) -> Option<&Arc<Language>> {
1381 self.language.as_ref()
1382 }
1383
1384 /// Returns the [`Language`] at the given location.
1385 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1386 let offset = position.to_offset(self);
1387 let mut is_first = true;
1388 let start_anchor = self.anchor_before(offset);
1389 let end_anchor = self.anchor_after(offset);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
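            // Keep the outermost layer unconditionally; keep a nested layer only if it has
            // no sub-range restrictions or one of its included sub-ranges contains the
            // queried position.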
1393 .filter(|layer| {
1394 if is_first {
1395 is_first = false;
1396 return true;
1397 }
1398 let any_sub_ranges_contain_range = layer
1399 .included_sub_ranges
1400 .map(|sub_ranges| {
1401 sub_ranges.iter().any(|sub_range| {
1402 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1403 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1404 !is_before_start && !is_after_end
1405 })
1406 })
1407 .unwrap_or(true);
1408 let result = any_sub_ranges_contain_range;
1409 return result;
1410 })
1411 .last()
1412 .map(|info| info.language.clone())
1413 .or_else(|| self.language.clone())
1414 }
1415
1416 /// Returns each [`Language`] for the active syntax layers at the given location.
1417 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1418 let offset = position.to_offset(self);
1419 let mut languages: Vec<Arc<Language>> = self
1420 .syntax_map
1421 .lock()
1422 .layers_for_range(offset..offset, &self.text, false)
1423 .map(|info| info.language.clone())
1424 .collect();
1425
1426 if languages.is_empty()
1427 && let Some(buffer_language) = self.language()
1428 {
1429 languages.push(buffer_language.clone());
1430 }
1431
1432 languages
1433 }
1434
1435 /// An integer version number that accounts for all updates besides
1436 /// the buffer's text itself (which is versioned via a version vector).
1437 pub fn non_text_state_update_count(&self) -> usize {
1438 self.non_text_state_update_count
1439 }
1440
1441 /// Whether the buffer is being parsed in the background.
1442 #[cfg(any(test, feature = "test-support"))]
1443 pub fn is_parsing(&self) -> bool {
1444 self.reparse.is_some()
1445 }
1446
1447 /// Indicates whether the buffer contains any regions that may be
1448 /// written in a language that hasn't been loaded yet.
1449 pub fn contains_unknown_injections(&self) -> bool {
1450 self.syntax_map.lock().contains_unknown_injections()
1451 }
1452
1453 #[cfg(any(test, feature = "test-support"))]
1454 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1455 self.sync_parse_timeout = timeout;
1456 }
1457
1458 /// Called after an edit to synchronize the buffer's main parse tree with
1459 /// the buffer's new underlying state.
1460 ///
1461 /// Locks the syntax map and interpolates the edits since the last reparse
1462 /// into the foreground syntax tree.
1463 ///
1464 /// Then takes a stable snapshot of the syntax map before unlocking it.
1465 /// The snapshot with the interpolated edits is sent to a background thread,
1466 /// where we ask Tree-sitter to perform an incremental parse.
1467 ///
1468 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1469 /// waiting on the parse to complete. As soon as it completes, we proceed
1470 /// synchronously, unless a 1ms timeout elapses.
1471 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns its result to the foreground parse state.
1476 ///
1477 /// If the buffer or grammar changed since the start of the background parse,
1478 /// initiate an additional reparse recursively. To avoid concurrent parses
1479 /// for the same buffer, we only initiate a new parse if we are not already
1480 /// parsing in the background.
1481 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1482 if self.reparse.is_some() {
1483 return;
1484 }
1485 let language = if let Some(language) = self.language.clone() {
1486 language
1487 } else {
1488 return;
1489 };
1490
1491 let text = self.text_snapshot();
1492 let parsed_version = self.version();
1493
1494 let mut syntax_map = self.syntax_map.lock();
1495 syntax_map.interpolate(&text);
1496 let language_registry = syntax_map.language_registry();
1497 let mut syntax_snapshot = syntax_map.snapshot();
1498 drop(syntax_map);
1499
1500 let parse_task = cx.background_spawn({
1501 let language = language.clone();
1502 let language_registry = language_registry.clone();
1503 async move {
1504 syntax_snapshot.reparse(&text, language_registry, language);
1505 syntax_snapshot
1506 }
1507 });
1508
1509 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
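        // Block the main thread for up to `sync_parse_timeout`; if the parse finishes in
        // time, apply it synchronously, otherwise finish it on a background task.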
1510 match cx
1511 .background_executor()
1512 .block_with_timeout(self.sync_parse_timeout, parse_task)
1513 {
1514 Ok(new_syntax_snapshot) => {
1515 self.did_finish_parsing(new_syntax_snapshot, cx);
1516 self.reparse = None;
1517 }
1518 Err(parse_task) => {
1519 self.reparse = Some(cx.spawn(async move |this, cx| {
1520 let new_syntax_map = parse_task.await;
1521 this.update(cx, move |this, cx| {
1522 let grammar_changed =
1523 this.language.as_ref().map_or(true, |current_language| {
1524 !Arc::ptr_eq(&language, current_language)
1525 });
1526 let language_registry_changed = new_syntax_map
1527 .contains_unknown_injections()
1528 && language_registry.map_or(false, |registry| {
1529 registry.version() != new_syntax_map.language_registry_version()
1530 });
1531 let parse_again = language_registry_changed
1532 || grammar_changed
1533 || this.version.changed_since(&parsed_version);
1534 this.did_finish_parsing(new_syntax_map, cx);
1535 this.reparse = None;
1536 if parse_again {
1537 this.reparse(cx);
1538 }
1539 })
1540 .ok();
1541 }));
1542 }
1543 }
1544 }
1545
1546 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1547 self.was_changed();
1548 self.non_text_state_update_count += 1;
1549 self.syntax_map.lock().did_parse(syntax_snapshot);
1550 self.request_autoindent(cx);
1551 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1552 cx.emit(BufferEvent::Reparsed);
1553 cx.notify();
1554 }
1555
1556 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1557 self.parse_status.1.clone()
1558 }
1559
1560 /// Assign to the buffer a set of diagnostics created by a given language server.
1561 pub fn update_diagnostics(
1562 &mut self,
1563 server_id: LanguageServerId,
1564 diagnostics: DiagnosticSet,
1565 cx: &mut Context<Self>,
1566 ) {
1567 let lamport_timestamp = self.text.lamport_clock.tick();
1568 let op = Operation::UpdateDiagnostics {
1569 server_id,
1570 diagnostics: diagnostics.iter().cloned().collect(),
1571 lamport_timestamp,
1572 };
1573
1574 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1575 self.send_operation(op, true, cx);
1576 }
1577
1578 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1579 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1580 return None;
1581 };
1582 Some(&self.diagnostics[idx].1)
1583 }
1584
1585 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1586 if let Some(indent_sizes) = self.compute_autoindents() {
1587 let indent_sizes = cx.background_spawn(indent_sizes);
1588 match cx
1589 .background_executor()
1590 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1591 {
1592 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1593 Err(indent_sizes) => {
1594 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1595 let indent_sizes = indent_sizes.await;
1596 this.update(cx, |this, cx| {
1597 this.apply_autoindents(indent_sizes, cx);
1598 })
1599 .ok();
1600 }));
1601 }
1602 }
1603 } else {
1604 self.autoindent_requests.clear();
1605 for tx in self.wait_for_autoindent_txs.drain(..) {
1606 tx.send(()).ok();
1607 }
1608 }
1609 }
1610
1611 fn compute_autoindents(
1612 &self,
1613 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1614 let max_rows_between_yields = 100;
1615 let snapshot = self.snapshot();
1616 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1617 return None;
1618 }
1619
1620 let autoindent_requests = self.autoindent_requests.clone();
1621 Some(async move {
1622 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1623 for request in autoindent_requests {
1624 // Resolve each edited range to its row in the current buffer and in the
1625 // buffer before this batch of edits.
1626 let mut row_ranges = Vec::new();
1627 let mut old_to_new_rows = BTreeMap::new();
1628 let mut language_indent_sizes_by_new_row = Vec::new();
1629 for entry in &request.entries {
1630 let position = entry.range.start;
1631 let new_row = position.to_point(&snapshot).row;
1632 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1633 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1634
1635 if !entry.first_line_is_new {
1636 let old_row = position.to_point(&request.before_edit).row;
1637 old_to_new_rows.insert(old_row, new_row);
1638 }
1639 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1640 }
1641
1642 // Build a map containing the suggested indentation for each of the edited lines
1643 // with respect to the state of the buffer before these edits. This map is keyed
1644 // by the rows for these lines in the current state of the buffer.
1645 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1646 let old_edited_ranges =
1647 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1648 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1649 let mut language_indent_size = IndentSize::default();
1650 for old_edited_range in old_edited_ranges {
1651 let suggestions = request
1652 .before_edit
1653 .suggest_autoindents(old_edited_range.clone())
1654 .into_iter()
1655 .flatten();
1656 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1657 if let Some(suggestion) = suggestion {
1658 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1659
1660 // Find the indent size based on the language for this row.
1661 while let Some((row, size)) = language_indent_sizes.peek() {
1662 if *row > new_row {
1663 break;
1664 }
1665 language_indent_size = *size;
1666 language_indent_sizes.next();
1667 }
1668
1669 let suggested_indent = old_to_new_rows
1670 .get(&suggestion.basis_row)
1671 .and_then(|from_row| {
1672 Some(old_suggestions.get(from_row).copied()?.0)
1673 })
1674 .unwrap_or_else(|| {
1675 request
1676 .before_edit
1677 .indent_size_for_line(suggestion.basis_row)
1678 })
1679 .with_delta(suggestion.delta, language_indent_size);
1680 old_suggestions
1681 .insert(new_row, (suggested_indent, suggestion.within_error));
1682 }
1683 }
1684 yield_now().await;
1685 }
1686
1687 // Compute new suggestions for each line, but only include them in the result
1688 // if they differ from the old suggestion for that line.
1689 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1690 let mut language_indent_size = IndentSize::default();
1691 for (row_range, original_indent_column) in row_ranges {
1692 let new_edited_row_range = if request.is_block_mode {
1693 row_range.start..row_range.start + 1
1694 } else {
1695 row_range.clone()
1696 };
1697
1698 let suggestions = snapshot
1699 .suggest_autoindents(new_edited_row_range.clone())
1700 .into_iter()
1701 .flatten();
1702 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1703 if let Some(suggestion) = suggestion {
1704 // Find the indent size based on the language for this row.
1705 while let Some((row, size)) = language_indent_sizes.peek() {
1706 if *row > new_row {
1707 break;
1708 }
1709 language_indent_size = *size;
1710 language_indent_sizes.next();
1711 }
1712
1713 let suggested_indent = indent_sizes
1714 .get(&suggestion.basis_row)
1715 .copied()
1716 .map(|e| e.0)
1717 .unwrap_or_else(|| {
1718 snapshot.indent_size_for_line(suggestion.basis_row)
1719 })
1720 .with_delta(suggestion.delta, language_indent_size);
1721
1722 if old_suggestions.get(&new_row).map_or(
1723 true,
1724 |(old_indentation, was_within_error)| {
1725 suggested_indent != *old_indentation
1726 && (!suggestion.within_error || *was_within_error)
1727 },
1728 ) {
1729 indent_sizes.insert(
1730 new_row,
1731 (suggested_indent, request.ignore_empty_lines),
1732 );
1733 }
1734 }
1735 }
1736
1737 if let (true, Some(original_indent_column)) =
1738 (request.is_block_mode, original_indent_column)
1739 {
1740 let new_indent =
1741 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1742 *indent
1743 } else {
1744 snapshot.indent_size_for_line(row_range.start)
1745 };
1746 let delta = new_indent.len as i64 - original_indent_column as i64;
1747 if delta != 0 {
1748 for row in row_range.skip(1) {
1749 indent_sizes.entry(row).or_insert_with(|| {
1750 let mut size = snapshot.indent_size_for_line(row);
1751 if size.kind == new_indent.kind {
1752 match delta.cmp(&0) {
1753 Ordering::Greater => size.len += delta as u32,
1754 Ordering::Less => {
1755 size.len = size.len.saturating_sub(-delta as u32)
1756 }
1757 Ordering::Equal => {}
1758 }
1759 }
1760 (size, request.ignore_empty_lines)
1761 });
1762 }
1763 }
1764 }
1765
1766 yield_now().await;
1767 }
1768 }
1769
1770 indent_sizes
1771 .into_iter()
1772 .filter_map(|(row, (indent, ignore_empty_lines))| {
1773 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1774 None
1775 } else {
1776 Some((row, indent))
1777 }
1778 })
1779 .collect()
1780 })
1781 }
1782
1783 fn apply_autoindents(
1784 &mut self,
1785 indent_sizes: BTreeMap<u32, IndentSize>,
1786 cx: &mut Context<Self>,
1787 ) {
1788 self.autoindent_requests.clear();
1789 for tx in self.wait_for_autoindent_txs.drain(..) {
1790 tx.send(()).ok();
1791 }
1792
1793 let edits: Vec<_> = indent_sizes
1794 .into_iter()
1795 .filter_map(|(row, indent_size)| {
1796 let current_size = indent_size_for_line(self, row);
1797 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1798 })
1799 .collect();
1800
1801 let preserve_preview = self.preserve_preview();
1802 self.edit(edits, None, cx);
1803 if preserve_preview {
1804 self.refresh_preview();
1805 }
1806 }
1807
1808 /// Create a minimal edit that will cause the given row to be indented
1809 /// with the given size. After applying this edit, the length of the line
1810 /// will always be at least `new_size.len`.
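///
/// # Example
///
/// A minimal sketch (not run as a doctest); the row and indent sizes are
/// illustrative only:
///
/// ```ignore
/// // Growing row 2 from two spaces to four spaces yields an insertion of two
/// // spaces at the start of that line.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     2,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string()))
/// );
/// ```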
1811 pub fn edit_for_indent_size_adjustment(
1812 row: u32,
1813 current_size: IndentSize,
1814 new_size: IndentSize,
1815 ) -> Option<(Range<Point>, String)> {
1816 if new_size.kind == current_size.kind {
1817 match new_size.len.cmp(&current_size.len) {
1818 Ordering::Greater => {
1819 let point = Point::new(row, 0);
1820 Some((
1821 point..point,
1822 iter::repeat(new_size.char())
1823 .take((new_size.len - current_size.len) as usize)
1824 .collect::<String>(),
1825 ))
1826 }
1827
1828 Ordering::Less => Some((
1829 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1830 String::new(),
1831 )),
1832
1833 Ordering::Equal => None,
1834 }
1835 } else {
1836 Some((
1837 Point::new(row, 0)..Point::new(row, current_size.len),
1838 iter::repeat(new_size.char())
1839 .take(new_size.len as usize)
1840 .collect::<String>(),
1841 ))
1842 }
1843 }
1844
1845 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1846 /// and the given new text.
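///
/// # Example
///
/// A rough sketch (not run as a doctest), assuming access to a `Buffer`, an
/// owned `new_text: String`, and an appropriate gpui context `cx` for each
/// call:
///
/// ```ignore
/// // The returned task runs on the background executor; the resulting `Diff`
/// // can later be passed to `Buffer::apply_diff`, which rebases its hunks if
/// // the buffer changed in the meantime.
/// let diff_task = buffer.diff(new_text, cx);
/// // ...await `diff_task` in an async context to obtain `diff`, then:
/// buffer.apply_diff(diff, cx);
/// ```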
1847 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1848 let old_text = self.as_rope().clone();
1849 let base_version = self.version();
1850 cx.background_executor()
1851 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1852 let old_text = old_text.to_string();
1853 let line_ending = LineEnding::detect(&new_text);
1854 LineEnding::normalize(&mut new_text);
1855 let edits = text_diff(&old_text, &new_text);
1856 Diff {
1857 base_version,
1858 line_ending,
1859 edits,
1860 }
1861 })
1862 }
1863
1864 /// Spawns a background task that searches the buffer for any whitespace
1865 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1866 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1867 let old_text = self.as_rope().clone();
1868 let line_ending = self.line_ending();
1869 let base_version = self.version();
1870 cx.background_spawn(async move {
1871 let ranges = trailing_whitespace_ranges(&old_text);
1872 let empty = Arc::<str>::from("");
1873 Diff {
1874 base_version,
1875 line_ending,
1876 edits: ranges
1877 .into_iter()
1878 .map(|range| (range, empty.clone()))
1879 .collect(),
1880 }
1881 })
1882 }
1883
1884 /// Ensures that the buffer ends with a single newline character, and
1885 /// no other whitespace. Does nothing if the buffer is empty.
1886 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1887 let len = self.len();
1888 if len == 0 {
1889 return;
1890 }
1891 let mut offset = len;
1892 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1893 let non_whitespace_len = chunk
1894 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1895 .len();
1896 offset -= chunk.len();
1897 offset += non_whitespace_len;
1898 if non_whitespace_len != 0 {
1899 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1900 return;
1901 }
1902 break;
1903 }
1904 }
1905 self.edit([(offset..len, "\n")], None, cx);
1906 }
1907
1908 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1909 /// calculated, the diff is adjusted to account for those changes, and any parts of the
1910 /// diff that conflict with those changes are discarded.
1911 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1912 let snapshot = self.snapshot();
1913 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1914 let mut delta = 0;
1915 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1916 while let Some(edit_since) = edits_since.peek() {
1917 // If the edit occurs after a diff hunk, then it does not
1918 // affect that hunk.
1919 if edit_since.old.start > range.end {
1920 break;
1921 }
1922 // If the edit precedes the diff hunk, then adjust the hunk
1923 // to reflect the edit.
1924 else if edit_since.old.end < range.start {
1925 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1926 edits_since.next();
1927 }
1928 // If the edit intersects a diff hunk, then discard that hunk.
1929 else {
1930 return None;
1931 }
1932 }
1933
1934 let start = (range.start as i64 + delta) as usize;
1935 let end = (range.end as i64 + delta) as usize;
1936 Some((start..end, new_text))
1937 });
1938
1939 self.start_transaction();
1940 self.text.set_line_ending(diff.line_ending);
1941 self.edit(adjusted_edits, None, cx);
1942 self.end_transaction(cx)
1943 }
1944
1945 fn has_unsaved_edits(&self) -> bool {
1946 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1947
1948 if last_version == self.version {
1949 self.has_unsaved_edits
1950 .set((last_version, has_unsaved_edits));
1951 return has_unsaved_edits;
1952 }
1953
1954 let has_edits = self.has_edits_since(&self.saved_version);
1955 self.has_unsaved_edits
1956 .set((self.version.clone(), has_edits));
1957 has_edits
1958 }
1959
1960 /// Checks if the buffer has unsaved changes.
1961 pub fn is_dirty(&self) -> bool {
1962 if self.capability == Capability::ReadOnly {
1963 return false;
1964 }
1965 if self.has_conflict {
1966 return true;
1967 }
1968 match self.file.as_ref().map(|f| f.disk_state()) {
1969 Some(DiskState::New) | Some(DiskState::Deleted) => {
1970 !self.is_empty() && self.has_unsaved_edits()
1971 }
1972 _ => self.has_unsaved_edits(),
1973 }
1974 }
1975
1976 /// Checks if the buffer and its file have both changed since the buffer
1977 /// was last saved or reloaded.
1978 pub fn has_conflict(&self) -> bool {
1979 if self.has_conflict {
1980 return true;
1981 }
1982 let Some(file) = self.file.as_ref() else {
1983 return false;
1984 };
1985 match file.disk_state() {
1986 DiskState::New => false,
1987 DiskState::Present { mtime } => match self.saved_mtime {
1988 Some(saved_mtime) => {
1989 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1990 }
1991 None => true,
1992 },
1993 DiskState::Deleted => false,
1994 }
1995 }
1996
1997 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1998 pub fn subscribe(&mut self) -> Subscription {
1999 self.text.subscribe()
2000 }
2001
2002 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2003 ///
2004 /// This allows downstream code to check if the buffer's text has changed without
2005 /// waiting for an effect cycle, which would be required if using events.
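///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `buffer: &mut Buffer`
/// and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// // The buffer only holds a weak reference, so keep the `Rc` alive for as
/// // long as the change flag is needed.
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
///
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(changed.get());
/// ```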
2006 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2007 if let Err(ix) = self
2008 .change_bits
2009 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2010 {
2011 self.change_bits.insert(ix, bit);
2012 }
2013 }
2014
2015 fn was_changed(&mut self) {
2016 self.change_bits.retain(|change_bit| {
2017 change_bit.upgrade().map_or(false, |bit| {
2018 bit.replace(true);
2019 true
2020 })
2021 });
2022 }
2023
2024 /// Starts a transaction, if one is not already in-progress. When undoing or
2025 /// redoing edits, all of the edits performed within a transaction are undone
2026 /// or redone together.
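///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `buffer: &mut Buffer`
/// and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Group two edits so that a single undo reverts both of them.
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// let transaction_id = buffer.end_transaction(cx);
/// ```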
2027 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2028 self.start_transaction_at(Instant::now())
2029 }
2030
2031 /// Starts a transaction, providing the current time. Subsequent transactions
2032 /// that occur within a short period of time will be grouped together. This
2033 /// is controlled by the buffer's undo grouping duration.
2034 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2035 self.transaction_depth += 1;
2036 if self.was_dirty_before_starting_transaction.is_none() {
2037 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2038 }
2039 self.text.start_transaction_at(now)
2040 }
2041
2042 /// Terminates the current transaction, if this is the outermost transaction.
2043 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2044 self.end_transaction_at(Instant::now(), cx)
2045 }
2046
2047 /// Terminates the current transaction, providing the current time. Subsequent transactions
2048 /// that occur within a short period of time will be grouped together. This
2049 /// is controlled by the buffer's undo grouping duration.
2050 pub fn end_transaction_at(
2051 &mut self,
2052 now: Instant,
2053 cx: &mut Context<Self>,
2054 ) -> Option<TransactionId> {
2055 assert!(self.transaction_depth > 0);
2056 self.transaction_depth -= 1;
2057 let was_dirty = if self.transaction_depth == 0 {
2058 self.was_dirty_before_starting_transaction.take().unwrap()
2059 } else {
2060 false
2061 };
2062 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2063 self.did_edit(&start_version, was_dirty, cx);
2064 Some(transaction_id)
2065 } else {
2066 None
2067 }
2068 }
2069
2070 /// Manually add a transaction to the buffer's undo history.
2071 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2072 self.text.push_transaction(transaction, now);
2073 }
2074
2075 /// Differs from `push_transaction` in that it does not clear the redo
2076 /// stack. Intended to be used to create a parent transaction to merge
2077 /// potential child transactions into.
2078 ///
2079 /// The caller is responsible for removing it from the undo history using
2080 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2081 /// are merged into this transaction, the caller is responsible for ensuring
2082 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2083 /// cleared is to create transactions with the usual `start_transaction` and
2084 /// `end_transaction` methods and merging the resulting transactions into
2085 /// the transaction created by this method.
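///
/// # Example
///
/// A rough sketch of the workflow described above (not run as a doctest),
/// assuming a `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// use std::time::Instant;
///
/// let parent = buffer.push_empty_transaction(Instant::now());
///
/// // ...later, perform an ordinary transaction and merge it into the parent.
/// buffer.start_transaction();
/// buffer.edit([(0..0, "edited\n")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     // No edits were made, so remove the empty parent transaction.
///     buffer.forget_transaction(parent);
/// }
/// ```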
2086 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2087 self.text.push_empty_transaction(now)
2088 }
2089
2090 /// Prevent the last transaction from being grouped with any subsequent transactions,
2091 /// even if they occur within the buffer's undo grouping duration.
2092 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2093 self.text.finalize_last_transaction()
2094 }
2095
2096 /// Manually group all changes since a given transaction.
2097 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2098 self.text.group_until_transaction(transaction_id);
2099 }
2100
2101 /// Manually remove a transaction from the buffer's undo history
2102 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2103 self.text.forget_transaction(transaction_id)
2104 }
2105
2106 /// Retrieve a transaction from the buffer's undo history
2107 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2108 self.text.get_transaction(transaction_id)
2109 }
2110
2111 /// Manually merge two transactions in the buffer's undo history.
2112 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2113 self.text.merge_transactions(transaction, destination);
2114 }
2115
2116 /// Waits for the buffer to receive operations with the given timestamps.
2117 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2118 &mut self,
2119 edit_ids: It,
2120 ) -> impl Future<Output = Result<()>> + use<It> {
2121 self.text.wait_for_edits(edit_ids)
2122 }
2123
2124 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2125 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2126 &mut self,
2127 anchors: It,
2128 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2129 self.text.wait_for_anchors(anchors)
2130 }
2131
2132 /// Waits for the buffer to receive operations up to the given version.
2133 pub fn wait_for_version(
2134 &mut self,
2135 version: clock::Global,
2136 ) -> impl Future<Output = Result<()>> + use<> {
2137 self.text.wait_for_version(version)
2138 }
2139
2140 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2141 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2142 pub fn give_up_waiting(&mut self) {
2143 self.text.give_up_waiting();
2144 }
2145
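/// Returns a receiver that resolves once any pending auto-indent requests have
/// been applied, or `None` if no auto-indent requests are pending.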
2146 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2147 let mut rx = None;
2148 if !self.autoindent_requests.is_empty() {
2149 let channel = oneshot::channel();
2150 self.wait_for_autoindent_txs.push(channel.0);
2151 rx = Some(channel.1);
2152 }
2153 rx
2154 }
2155
2156 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2157 pub fn set_active_selections(
2158 &mut self,
2159 selections: Arc<[Selection<Anchor>]>,
2160 line_mode: bool,
2161 cursor_shape: CursorShape,
2162 cx: &mut Context<Self>,
2163 ) {
2164 let lamport_timestamp = self.text.lamport_clock.tick();
2165 self.remote_selections.insert(
2166 self.text.replica_id(),
2167 SelectionSet {
2168 selections: selections.clone(),
2169 lamport_timestamp,
2170 line_mode,
2171 cursor_shape,
2172 },
2173 );
2174 self.send_operation(
2175 Operation::UpdateSelections {
2176 selections,
2177 line_mode,
2178 lamport_timestamp,
2179 cursor_shape,
2180 },
2181 true,
2182 cx,
2183 );
2184 self.non_text_state_update_count += 1;
2185 cx.notify();
2186 }
2187
2188 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2189 /// this replica.
2190 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2191 if self
2192 .remote_selections
2193 .get(&self.text.replica_id())
2194 .map_or(true, |set| !set.selections.is_empty())
2195 {
2196 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2197 }
2198 }
2199
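/// Stores a set of selections attributed to the agent (under
/// [`AGENT_REPLICA_ID`]). Unlike [`Buffer::set_active_selections`], these
/// selections are not broadcast to other replicas of the buffer.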
2200 pub fn set_agent_selections(
2201 &mut self,
2202 selections: Arc<[Selection<Anchor>]>,
2203 line_mode: bool,
2204 cursor_shape: CursorShape,
2205 cx: &mut Context<Self>,
2206 ) {
2207 let lamport_timestamp = self.text.lamport_clock.tick();
2208 self.remote_selections.insert(
2209 AGENT_REPLICA_ID,
2210 SelectionSet {
2211 selections: selections.clone(),
2212 lamport_timestamp,
2213 line_mode,
2214 cursor_shape,
2215 },
2216 );
2217 self.non_text_state_update_count += 1;
2218 cx.notify();
2219 }
2220
2221 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2222 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2223 }
2224
2225 /// Replaces the buffer's entire text.
2226 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2227 where
2228 T: Into<Arc<str>>,
2229 {
2230 self.autoindent_requests.clear();
2231 self.edit([(0..self.len(), text)], None, cx)
2232 }
2233
2234 /// Appends the given text to the end of the buffer.
2235 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2236 where
2237 T: Into<Arc<str>>,
2238 {
2239 self.edit([(self.len()..self.len(), text)], None, cx)
2240 }
2241
2242 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2243 /// delete, and a string of text to insert at that location.
2244 ///
2245 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2246 /// request for the edited ranges, which will be processed when the buffer finishes
2247 /// parsing.
2248 ///
2249 /// Parsing takes place at the end of a transaction, and may compute synchronously
2250 /// or asynchronously, depending on the changes.
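///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `buffer: &mut Buffer`
/// and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Replace the first three characters of row 0 and insert a new line at the
/// // start of row 1, auto-indenting each edited line.
/// buffer.edit(
///     [
///         (Point::new(0, 0)..Point::new(0, 3), "let"),
///         (Point::new(1, 0)..Point::new(1, 0), "value\n"),
///     ],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```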
2251 pub fn edit<I, S, T>(
2252 &mut self,
2253 edits_iter: I,
2254 autoindent_mode: Option<AutoindentMode>,
2255 cx: &mut Context<Self>,
2256 ) -> Option<clock::Lamport>
2257 where
2258 I: IntoIterator<Item = (Range<S>, T)>,
2259 S: ToOffset,
2260 T: Into<Arc<str>>,
2261 {
2262 // Skip invalid edits and coalesce contiguous ones.
2263 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2264
2265 for (range, new_text) in edits_iter {
2266 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2267
2268 if range.start > range.end {
2269 mem::swap(&mut range.start, &mut range.end);
2270 }
2271 let new_text = new_text.into();
2272 if !new_text.is_empty() || !range.is_empty() {
2273 if let Some((prev_range, prev_text)) = edits.last_mut()
2274 && prev_range.end >= range.start
2275 {
2276 prev_range.end = cmp::max(prev_range.end, range.end);
2277 *prev_text = format!("{prev_text}{new_text}").into();
2278 } else {
2279 edits.push((range, new_text));
2280 }
2281 }
2282 }
2283 if edits.is_empty() {
2284 return None;
2285 }
2286
2287 self.start_transaction();
2288 self.pending_autoindent.take();
2289 let autoindent_request = autoindent_mode
2290 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2291
2292 let edit_operation = self.text.edit(edits.iter().cloned());
2293 let edit_id = edit_operation.timestamp();
2294
2295 if let Some((before_edit, mode)) = autoindent_request {
2296 let mut delta = 0isize;
2297 let mut previous_setting = None;
2298 let entries: Vec<_> = edits
2299 .into_iter()
2300 .enumerate()
2301 .zip(&edit_operation.as_edit().unwrap().new_text)
2302 .filter(|((_, (range, _)), _)| {
2303 let language = before_edit.language_at(range.start);
2304 let language_id = language.map(|l| l.id());
2305 if let Some((cached_language_id, auto_indent)) = previous_setting
2306 && cached_language_id == language_id
2307 {
2308 auto_indent
2309 } else {
2310 // The auto-indent setting is not present in editorconfigs, hence
2311 // we can avoid passing the file here.
2312 let auto_indent =
2313 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2314 previous_setting = Some((language_id, auto_indent));
2315 auto_indent
2316 }
2317 })
2318 .map(|((ix, (range, _)), new_text)| {
2319 let new_text_length = new_text.len();
2320 let old_start = range.start.to_point(&before_edit);
2321 let new_start = (delta + range.start as isize) as usize;
2322 let range_len = range.end - range.start;
2323 delta += new_text_length as isize - range_len as isize;
2324
2325 // Decide what range of the insertion to auto-indent, and whether
2326 // the first line of the insertion should be considered a newly-inserted line
2327 // or an edit to an existing line.
2328 let mut range_of_insertion_to_indent = 0..new_text_length;
2329 let mut first_line_is_new = true;
2330
2331 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2332 let old_line_end = before_edit.line_len(old_start.row);
2333
2334 if old_start.column > old_line_start {
2335 first_line_is_new = false;
2336 }
2337
2338 if !new_text.contains('\n')
2339 && (old_start.column + (range_len as u32) < old_line_end
2340 || old_line_end == old_line_start)
2341 {
2342 first_line_is_new = false;
2343 }
2344
2345 // When inserting text starting with a newline, avoid auto-indenting the
2346 // previous line.
2347 if new_text.starts_with('\n') {
2348 range_of_insertion_to_indent.start += 1;
2349 first_line_is_new = true;
2350 }
2351
2352 let mut original_indent_column = None;
2353 if let AutoindentMode::Block {
2354 original_indent_columns,
2355 } = &mode
2356 {
2357 original_indent_column = Some(if new_text.starts_with('\n') {
2358 indent_size_for_text(
2359 new_text[range_of_insertion_to_indent.clone()].chars(),
2360 )
2361 .len
2362 } else {
2363 original_indent_columns
2364 .get(ix)
2365 .copied()
2366 .flatten()
2367 .unwrap_or_else(|| {
2368 indent_size_for_text(
2369 new_text[range_of_insertion_to_indent.clone()].chars(),
2370 )
2371 .len
2372 })
2373 });
2374
2375 // Avoid auto-indenting the line after the edit.
2376 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2377 range_of_insertion_to_indent.end -= 1;
2378 }
2379 }
2380
2381 AutoindentRequestEntry {
2382 first_line_is_new,
2383 original_indent_column,
2384 indent_size: before_edit.language_indent_size_at(range.start, cx),
2385 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2386 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2387 }
2388 })
2389 .collect();
2390
2391 if !entries.is_empty() {
2392 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2393 before_edit,
2394 entries,
2395 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2396 ignore_empty_lines: false,
2397 }));
2398 }
2399 }
2400
2401 self.end_transaction(cx);
2402 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2403 Some(edit_id)
2404 }
2405
2406 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2407 self.was_changed();
2408
2409 if self.edits_since::<usize>(old_version).next().is_none() {
2410 return;
2411 }
2412
2413 self.reparse(cx);
2414 cx.emit(BufferEvent::Edited);
2415 if was_dirty != self.is_dirty() {
2416 cx.emit(BufferEvent::DirtyChanged);
2417 }
2418 cx.notify();
2419 }
2420
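/// Requests auto-indentation for the given ranges, using the current contents
/// of the buffer as the pre-edit state. Empty lines within the ranges are left
/// untouched.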
2421 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2422 where
2423 I: IntoIterator<Item = Range<T>>,
2424 T: ToOffset + Copy,
2425 {
2426 let before_edit = self.snapshot();
2427 let entries = ranges
2428 .into_iter()
2429 .map(|range| AutoindentRequestEntry {
2430 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2431 first_line_is_new: true,
2432 indent_size: before_edit.language_indent_size_at(range.start, cx),
2433 original_indent_column: None,
2434 })
2435 .collect();
2436 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2437 before_edit,
2438 entries,
2439 is_block_mode: false,
2440 ignore_empty_lines: true,
2441 }));
2442 self.request_autoindent(cx);
2443 }
2444
2445 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2446 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
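///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `buffer: &mut Buffer`
/// and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Insert an empty, auto-indented line near row 3, padded with blank lines
/// // above and below if they are not already blank.
/// let start = buffer.insert_empty_line(Point::new(3, 0), true, true, cx);
/// ```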
2447 pub fn insert_empty_line(
2448 &mut self,
2449 position: impl ToPoint,
2450 space_above: bool,
2451 space_below: bool,
2452 cx: &mut Context<Self>,
2453 ) -> Point {
2454 let mut position = position.to_point(self);
2455
2456 self.start_transaction();
2457
2458 self.edit(
2459 [(position..position, "\n")],
2460 Some(AutoindentMode::EachLine),
2461 cx,
2462 );
2463
2464 if position.column > 0 {
2465 position += Point::new(1, 0);
2466 }
2467
2468 if !self.is_line_blank(position.row) {
2469 self.edit(
2470 [(position..position, "\n")],
2471 Some(AutoindentMode::EachLine),
2472 cx,
2473 );
2474 }
2475
2476 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2477 self.edit(
2478 [(position..position, "\n")],
2479 Some(AutoindentMode::EachLine),
2480 cx,
2481 );
2482 position.row += 1;
2483 }
2484
2485 if space_below
2486 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2487 {
2488 self.edit(
2489 [(position..position, "\n")],
2490 Some(AutoindentMode::EachLine),
2491 cx,
2492 );
2493 }
2494
2495 self.end_transaction(cx);
2496
2497 position
2498 }
2499
2500 /// Applies the given remote operations to the buffer.
2501 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2502 self.pending_autoindent.take();
2503 let was_dirty = self.is_dirty();
2504 let old_version = self.version.clone();
2505 let mut deferred_ops = Vec::new();
2506 let buffer_ops = ops
2507 .into_iter()
2508 .filter_map(|op| match op {
2509 Operation::Buffer(op) => Some(op),
2510 _ => {
2511 if self.can_apply_op(&op) {
2512 self.apply_op(op, cx);
2513 } else {
2514 deferred_ops.push(op);
2515 }
2516 None
2517 }
2518 })
2519 .collect::<Vec<_>>();
2520 for operation in buffer_ops.iter() {
2521 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2522 }
2523 self.text.apply_ops(buffer_ops);
2524 self.deferred_ops.insert(deferred_ops);
2525 self.flush_deferred_ops(cx);
2526 self.did_edit(&old_version, was_dirty, cx);
2527 // Notify regardless of whether the buffer was edited, since the operations could
2528 // include a selection update.
2529 cx.notify();
2530 }
2531
2532 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2533 let mut deferred_ops = Vec::new();
2534 for op in self.deferred_ops.drain().iter().cloned() {
2535 if self.can_apply_op(&op) {
2536 self.apply_op(op, cx);
2537 } else {
2538 deferred_ops.push(op);
2539 }
2540 }
2541 self.deferred_ops.insert(deferred_ops);
2542 }
2543
2544 pub fn has_deferred_ops(&self) -> bool {
2545 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2546 }
2547
2548 fn can_apply_op(&self, operation: &Operation) -> bool {
2549 match operation {
2550 Operation::Buffer(_) => {
2551 unreachable!("buffer operations should never be applied at this layer")
2552 }
2553 Operation::UpdateDiagnostics {
2554 diagnostics: diagnostic_set,
2555 ..
2556 } => diagnostic_set.iter().all(|diagnostic| {
2557 self.text.can_resolve(&diagnostic.range.start)
2558 && self.text.can_resolve(&diagnostic.range.end)
2559 }),
2560 Operation::UpdateSelections { selections, .. } => selections
2561 .iter()
2562 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2563 Operation::UpdateCompletionTriggers { .. } => true,
2564 }
2565 }
2566
2567 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2568 match operation {
2569 Operation::Buffer(_) => {
2570 unreachable!("buffer operations should never be applied at this layer")
2571 }
2572 Operation::UpdateDiagnostics {
2573 server_id,
2574 diagnostics: diagnostic_set,
2575 lamport_timestamp,
2576 } => {
2577 let snapshot = self.snapshot();
2578 self.apply_diagnostic_update(
2579 server_id,
2580 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2581 lamport_timestamp,
2582 cx,
2583 );
2584 }
2585 Operation::UpdateSelections {
2586 selections,
2587 lamport_timestamp,
2588 line_mode,
2589 cursor_shape,
2590 } => {
2591 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2592 && set.lamport_timestamp > lamport_timestamp
2593 {
2594 return;
2595 }
2596
2597 self.remote_selections.insert(
2598 lamport_timestamp.replica_id,
2599 SelectionSet {
2600 selections,
2601 lamport_timestamp,
2602 line_mode,
2603 cursor_shape,
2604 },
2605 );
2606 self.text.lamport_clock.observe(lamport_timestamp);
2607 self.non_text_state_update_count += 1;
2608 }
2609 Operation::UpdateCompletionTriggers {
2610 triggers,
2611 lamport_timestamp,
2612 server_id,
2613 } => {
2614 if triggers.is_empty() {
2615 self.completion_triggers_per_language_server
2616 .remove(&server_id);
2617 self.completion_triggers = self
2618 .completion_triggers_per_language_server
2619 .values()
2620 .flat_map(|triggers| triggers.into_iter().cloned())
2621 .collect();
2622 } else {
2623 self.completion_triggers_per_language_server
2624 .insert(server_id, triggers.iter().cloned().collect());
2625 self.completion_triggers.extend(triggers);
2626 }
2627 self.text.lamport_clock.observe(lamport_timestamp);
2628 }
2629 }
2630 }
2631
2632 fn apply_diagnostic_update(
2633 &mut self,
2634 server_id: LanguageServerId,
2635 diagnostics: DiagnosticSet,
2636 lamport_timestamp: clock::Lamport,
2637 cx: &mut Context<Self>,
2638 ) {
2639 if lamport_timestamp > self.diagnostics_timestamp {
2640 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2641 if diagnostics.is_empty() {
2642 if let Ok(ix) = ix {
2643 self.diagnostics.remove(ix);
2644 }
2645 } else {
2646 match ix {
2647 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2648 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2649 };
2650 }
2651 self.diagnostics_timestamp = lamport_timestamp;
2652 self.non_text_state_update_count += 1;
2653 self.text.lamport_clock.observe(lamport_timestamp);
2654 cx.notify();
2655 cx.emit(BufferEvent::DiagnosticsUpdated);
2656 }
2657 }
2658
2659 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2660 self.was_changed();
2661 cx.emit(BufferEvent::Operation {
2662 operation,
2663 is_local,
2664 });
2665 }
2666
2667 /// Removes the selections for a given peer.
2668 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2669 self.remote_selections.remove(&replica_id);
2670 cx.notify();
2671 }
2672
2673 /// Undoes the most recent transaction.
2674 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2675 let was_dirty = self.is_dirty();
2676 let old_version = self.version.clone();
2677
2678 if let Some((transaction_id, operation)) = self.text.undo() {
2679 self.send_operation(Operation::Buffer(operation), true, cx);
2680 self.did_edit(&old_version, was_dirty, cx);
2681 Some(transaction_id)
2682 } else {
2683 None
2684 }
2685 }
2686
2687 /// Manually undoes a specific transaction in the buffer's undo history.
2688 pub fn undo_transaction(
2689 &mut self,
2690 transaction_id: TransactionId,
2691 cx: &mut Context<Self>,
2692 ) -> bool {
2693 let was_dirty = self.is_dirty();
2694 let old_version = self.version.clone();
2695 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2696 self.send_operation(Operation::Buffer(operation), true, cx);
2697 self.did_edit(&old_version, was_dirty, cx);
2698 true
2699 } else {
2700 false
2701 }
2702 }
2703
2704 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2705 pub fn undo_to_transaction(
2706 &mut self,
2707 transaction_id: TransactionId,
2708 cx: &mut Context<Self>,
2709 ) -> bool {
2710 let was_dirty = self.is_dirty();
2711 let old_version = self.version.clone();
2712
2713 let operations = self.text.undo_to_transaction(transaction_id);
2714 let undone = !operations.is_empty();
2715 for operation in operations {
2716 self.send_operation(Operation::Buffer(operation), true, cx);
2717 }
2718 if undone {
2719 self.did_edit(&old_version, was_dirty, cx)
2720 }
2721 undone
2722 }
2723
2724 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2725 let was_dirty = self.is_dirty();
2726 let operation = self.text.undo_operations(counts);
2727 let old_version = self.version.clone();
2728 self.send_operation(Operation::Buffer(operation), true, cx);
2729 self.did_edit(&old_version, was_dirty, cx);
2730 }
2731
2732 /// Redoes the most recently undone transaction.
2733 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2734 let was_dirty = self.is_dirty();
2735 let old_version = self.version.clone();
2736
2737 if let Some((transaction_id, operation)) = self.text.redo() {
2738 self.send_operation(Operation::Buffer(operation), true, cx);
2739 self.did_edit(&old_version, was_dirty, cx);
2740 Some(transaction_id)
2741 } else {
2742 None
2743 }
2744 }
2745
2746 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2747 pub fn redo_to_transaction(
2748 &mut self,
2749 transaction_id: TransactionId,
2750 cx: &mut Context<Self>,
2751 ) -> bool {
2752 let was_dirty = self.is_dirty();
2753 let old_version = self.version.clone();
2754
2755 let operations = self.text.redo_to_transaction(transaction_id);
2756 let redone = !operations.is_empty();
2757 for operation in operations {
2758 self.send_operation(Operation::Buffer(operation), true, cx);
2759 }
2760 if redone {
2761 self.did_edit(&old_version, was_dirty, cx)
2762 }
2763 redone
2764 }
2765
2766 /// Overrides the current completion triggers for the given language server with the provided set.
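///
/// # Example
///
/// A minimal sketch (not run as a doctest); the server id and trigger
/// characters are illustrative only:
///
/// ```ignore
/// use std::collections::BTreeSet;
///
/// buffer.set_completion_triggers(
///     server_id,
///     BTreeSet::from([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// ```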
2767 pub fn set_completion_triggers(
2768 &mut self,
2769 server_id: LanguageServerId,
2770 triggers: BTreeSet<String>,
2771 cx: &mut Context<Self>,
2772 ) {
2773 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2774 if triggers.is_empty() {
2775 self.completion_triggers_per_language_server
2776 .remove(&server_id);
2777 self.completion_triggers = self
2778 .completion_triggers_per_language_server
2779 .values()
2780 .flat_map(|triggers| triggers.into_iter().cloned())
2781 .collect();
2782 } else {
2783 self.completion_triggers_per_language_server
2784 .insert(server_id, triggers.clone());
2785 self.completion_triggers.extend(triggers.iter().cloned());
2786 }
2787 self.send_operation(
2788 Operation::UpdateCompletionTriggers {
2789 triggers: triggers.into_iter().collect(),
2790 lamport_timestamp: self.completion_triggers_timestamp,
2791 server_id,
2792 },
2793 true,
2794 cx,
2795 );
2796 cx.notify();
2797 }
2798
2799 /// Returns the set of strings that trigger a completion menu for this language.
2800 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2801 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2802 &self.completion_triggers
2803 }
2804
2805 /// Call this directly after performing edits to prevent the preview tab
2806 /// from being dismissed by those edits. It causes [`Buffer::preserve_preview`]
2807 /// to return true until there are additional edits.
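///
/// # Example
///
/// A minimal sketch (not run as a doctest) of the pattern used for edits that
/// should not dismiss a preview tab:
///
/// ```ignore
/// let preserve = buffer.preserve_preview();
/// buffer.edit([(0..0, "// generated header\n")], None, cx);
/// if preserve {
///     buffer.refresh_preview();
/// }
/// ```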
2808 pub fn refresh_preview(&mut self) {
2809 self.preview_version = self.version.clone();
2810 }
2811
2812 /// Whether we should preserve the preview status of a tab containing this buffer.
2813 pub fn preserve_preview(&self) -> bool {
2814 !self.has_edits_since(&self.preview_version)
2815 }
2816}
2817
2818#[doc(hidden)]
2819#[cfg(any(test, feature = "test-support"))]
2820impl Buffer {
2821 pub fn edit_via_marked_text(
2822 &mut self,
2823 marked_string: &str,
2824 autoindent_mode: Option<AutoindentMode>,
2825 cx: &mut Context<Self>,
2826 ) {
2827 let edits = self.edits_for_marked_text(marked_string);
2828 self.edit(edits, autoindent_mode, cx);
2829 }
2830
2831 pub fn set_group_interval(&mut self, group_interval: Duration) {
2832 self.text.set_group_interval(group_interval);
2833 }
2834
2835 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2836 where
2837 T: rand::Rng,
2838 {
2839 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2840 let mut last_end = None;
2841 for _ in 0..old_range_count {
2842 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2843 break;
2844 }
2845
2846 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2847 let mut range = self.random_byte_range(new_start, rng);
2848 if rng.gen_bool(0.2) {
2849 mem::swap(&mut range.start, &mut range.end);
2850 }
2851 last_end = Some(range.end);
2852
2853 let new_text_len = rng.gen_range(0..10);
2854 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2855 new_text = new_text.to_uppercase();
2856
2857 edits.push((range, new_text));
2858 }
2859 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2860 self.edit(edits, None, cx);
2861 }
2862
2863 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2864 let was_dirty = self.is_dirty();
2865 let old_version = self.version.clone();
2866
2867 let ops = self.text.randomly_undo_redo(rng);
2868 if !ops.is_empty() {
2869 for op in ops {
2870 self.send_operation(Operation::Buffer(op), true, cx);
2871 self.did_edit(&old_version, was_dirty, cx);
2872 }
2873 }
2874 }
2875}
2876
2877impl EventEmitter<BufferEvent> for Buffer {}
2878
2879impl Deref for Buffer {
2880 type Target = TextBuffer;
2881
2882 fn deref(&self) -> &Self::Target {
2883 &self.text
2884 }
2885}
2886
2887impl BufferSnapshot {
2888 /// Returns [`IndentSize`] for a given line that respects user settings and
2889 /// language preferences.
2890 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2891 indent_size_for_line(self, row)
2892 }
2893
2894 /// Returns [`IndentSize`] for a given position that respects user settings
2895 /// and language preferences.
2896 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2897 let settings = language_settings(
2898 self.language_at(position).map(|l| l.name()),
2899 self.file(),
2900 cx,
2901 );
2902 if settings.hard_tabs {
2903 IndentSize::tab()
2904 } else {
2905 IndentSize::spaces(settings.tab_size.get())
2906 }
2907 }
2908
2909 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2910 /// is passed in as `single_indent_size`.
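///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `snapshot:
/// BufferSnapshot`:
///
/// ```ignore
/// // Suggest indentation for rows 2 through 4, using four spaces per level.
/// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: {} columns", indent.len);
/// }
/// ```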
2911 pub fn suggested_indents(
2912 &self,
2913 rows: impl Iterator<Item = u32>,
2914 single_indent_size: IndentSize,
2915 ) -> BTreeMap<u32, IndentSize> {
2916 let mut result = BTreeMap::new();
2917
2918 for row_range in contiguous_ranges(rows, 10) {
2919 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2920 Some(suggestions) => suggestions,
2921 _ => break,
2922 };
2923
2924 for (row, suggestion) in row_range.zip(suggestions) {
2925 let indent_size = if let Some(suggestion) = suggestion {
2926 result
2927 .get(&suggestion.basis_row)
2928 .copied()
2929 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2930 .with_delta(suggestion.delta, single_indent_size)
2931 } else {
2932 self.indent_size_for_line(row)
2933 };
2934
2935 result.insert(row, indent_size);
2936 }
2937 }
2938
2939 result
2940 }
2941
2942 fn suggest_autoindents(
2943 &self,
2944 row_range: Range<u32>,
2945 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2946 let config = &self.language.as_ref()?.config;
2947 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2948
2949 #[derive(Debug, Clone)]
2950 struct StartPosition {
2951 start: Point,
2952 suffix: SharedString,
2953 }
2954
2955 // Find the suggested indentation ranges based on the syntax tree.
2956 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2957 let end = Point::new(row_range.end, 0);
2958 let range = (start..end).to_offset(&self.text);
2959 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2960 Some(&grammar.indents_config.as_ref()?.query)
2961 });
2962 let indent_configs = matches
2963 .grammars()
2964 .iter()
2965 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2966 .collect::<Vec<_>>();
2967
2968 let mut indent_ranges = Vec::<Range<Point>>::new();
2969 let mut start_positions = Vec::<StartPosition>::new();
2970 let mut outdent_positions = Vec::<Point>::new();
2971 while let Some(mat) = matches.peek() {
2972 let mut start: Option<Point> = None;
2973 let mut end: Option<Point> = None;
2974
2975 let config = indent_configs[mat.grammar_index];
2976 for capture in mat.captures {
2977 if capture.index == config.indent_capture_ix {
2978 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2979 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2980 } else if Some(capture.index) == config.start_capture_ix {
2981 start = Some(Point::from_ts_point(capture.node.end_position()));
2982 } else if Some(capture.index) == config.end_capture_ix {
2983 end = Some(Point::from_ts_point(capture.node.start_position()));
2984 } else if Some(capture.index) == config.outdent_capture_ix {
2985 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2986 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2987 start_positions.push(StartPosition {
2988 start: Point::from_ts_point(capture.node.start_position()),
2989 suffix: suffix.clone(),
2990 });
2991 }
2992 }
2993
2994 matches.advance();
2995 if let Some((start, end)) = start.zip(end) {
2996 if start.row == end.row {
2997 continue;
2998 }
2999 let range = start..end;
3000 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3001 Err(ix) => indent_ranges.insert(ix, range),
3002 Ok(ix) => {
3003 let prev_range = &mut indent_ranges[ix];
3004 prev_range.end = prev_range.end.max(range.end);
3005 }
3006 }
3007 }
3008 }
3009
3010 let mut error_ranges = Vec::<Range<Point>>::new();
3011 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3012 grammar.error_query.as_ref()
3013 });
3014 while let Some(mat) = matches.peek() {
3015 let node = mat.captures[0].node;
3016 let start = Point::from_ts_point(node.start_position());
3017 let end = Point::from_ts_point(node.end_position());
3018 let range = start..end;
3019 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3020 Ok(ix) | Err(ix) => ix,
3021 };
3022 let mut end_ix = ix;
3023 while let Some(existing_range) = error_ranges.get(end_ix) {
3024 if existing_range.end < end {
3025 end_ix += 1;
3026 } else {
3027 break;
3028 }
3029 }
3030 error_ranges.splice(ix..end_ix, [range]);
3031 matches.advance();
3032 }
3033
3034 outdent_positions.sort();
3035 for outdent_position in outdent_positions {
3036 // Find the innermost indent range containing this outdent position
3037 // and truncate its end to the outdent position.
3038 if let Some(range_to_truncate) = indent_ranges
3039 .iter_mut()
3040 .filter(|indent_range| indent_range.contains(&outdent_position))
3041 .next_back()
3042 {
3043 range_to_truncate.end = outdent_position;
3044 }
3045 }
3046
3047 start_positions.sort_by_key(|b| b.start);
3048
3049 // Find the suggested indentation increases and decreases based on regexes.
3050 let mut regex_outdent_map = HashMap::default();
3051 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3052 let mut start_positions_iter = start_positions.iter().peekable();
3053
3054 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3055 self.for_each_line(
3056 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3057 ..Point::new(row_range.end, 0),
3058 |row, line| {
3059 if config
3060 .decrease_indent_pattern
3061 .as_ref()
3062 .map_or(false, |regex| regex.is_match(line))
3063 {
3064 indent_change_rows.push((row, Ordering::Less));
3065 }
3066 if config
3067 .increase_indent_pattern
3068 .as_ref()
3069 .map_or(false, |regex| regex.is_match(line))
3070 {
3071 indent_change_rows.push((row + 1, Ordering::Greater));
3072 }
3073 while let Some(pos) = start_positions_iter.peek() {
3074 if pos.start.row < row {
3075 let pos = start_positions_iter.next().unwrap();
3076 last_seen_suffix
3077 .entry(pos.suffix.to_string())
3078 .or_default()
3079 .push(pos.start);
3080 } else {
3081 break;
3082 }
3083 }
3084 for rule in &config.decrease_indent_patterns {
3085 if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) {
3086 let row_start_column = self.indent_size_for_line(row).len;
3087 let basis_row = rule
3088 .valid_after
3089 .iter()
3090 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3091 .flatten()
3092 .filter(|start_point| start_point.column <= row_start_column)
3093 .max_by_key(|start_point| start_point.row);
3094 if let Some(outdent_to_row) = basis_row {
3095 regex_outdent_map.insert(row, outdent_to_row.row);
3096 }
3097 break;
3098 }
3099 }
3100 },
3101 );
3102
3103 let mut indent_changes = indent_change_rows.into_iter().peekable();
3104 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3105 prev_non_blank_row.unwrap_or(0)
3106 } else {
3107 row_range.start.saturating_sub(1)
3108 };
3109
3110 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3111 Some(row_range.map(move |row| {
3112 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3113
3114 let mut indent_from_prev_row = false;
3115 let mut outdent_from_prev_row = false;
3116 let mut outdent_to_row = u32::MAX;
3117 let mut from_regex = false;
3118
3119 while let Some((indent_row, delta)) = indent_changes.peek() {
3120 match indent_row.cmp(&row) {
3121 Ordering::Equal => match delta {
3122 Ordering::Less => {
3123 from_regex = true;
3124 outdent_from_prev_row = true
3125 }
3126 Ordering::Greater => {
3127 indent_from_prev_row = true;
3128 from_regex = true
3129 }
3130 _ => {}
3131 },
3132
3133 Ordering::Greater => break,
3134 Ordering::Less => {}
3135 }
3136
3137 indent_changes.next();
3138 }
3139
3140 for range in &indent_ranges {
3141 if range.start.row >= row {
3142 break;
3143 }
3144 if range.start.row == prev_row && range.end > row_start {
3145 indent_from_prev_row = true;
3146 }
3147 if range.end > prev_row_start && range.end <= row_start {
3148 outdent_to_row = outdent_to_row.min(range.start.row);
3149 }
3150 }
3151
3152 if let Some(basis_row) = regex_outdent_map.get(&row) {
3153 indent_from_prev_row = false;
3154 outdent_to_row = *basis_row;
3155 from_regex = true;
3156 }
3157
3158 let within_error = error_ranges
3159 .iter()
3160 .any(|e| e.start.row < row && e.end > row_start);
3161
3162 let suggestion = if outdent_to_row == prev_row
3163 || (outdent_from_prev_row && indent_from_prev_row)
3164 {
3165 Some(IndentSuggestion {
3166 basis_row: prev_row,
3167 delta: Ordering::Equal,
3168 within_error: within_error && !from_regex,
3169 })
3170 } else if indent_from_prev_row {
3171 Some(IndentSuggestion {
3172 basis_row: prev_row,
3173 delta: Ordering::Greater,
3174 within_error: within_error && !from_regex,
3175 })
3176 } else if outdent_to_row < prev_row {
3177 Some(IndentSuggestion {
3178 basis_row: outdent_to_row,
3179 delta: Ordering::Equal,
3180 within_error: within_error && !from_regex,
3181 })
3182 } else if outdent_from_prev_row {
3183 Some(IndentSuggestion {
3184 basis_row: prev_row,
3185 delta: Ordering::Less,
3186 within_error: within_error && !from_regex,
3187 })
3188 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3189 {
3190 Some(IndentSuggestion {
3191 basis_row: prev_row,
3192 delta: Ordering::Equal,
3193 within_error: within_error && !from_regex,
3194 })
3195 } else {
3196 None
3197 };
3198
3199 prev_row = row;
3200 prev_row_start = row_start;
3201 suggestion
3202 }))
3203 }
3204
3205 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3206 while row > 0 {
3207 row -= 1;
3208 if !self.is_line_blank(row) {
3209 return Some(row);
3210 }
3211 }
3212 None
3213 }
3214
3215 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3216 let captures = self.syntax.captures(range, &self.text, |grammar| {
3217 grammar.highlights_query.as_ref()
3218 });
3219 let highlight_maps = captures
3220 .grammars()
3221 .iter()
3222 .map(|grammar| grammar.highlight_map())
3223 .collect();
3224 (captures, highlight_maps)
3225 }
3226
3227 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3228 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3229 /// returned in chunks where each chunk has a single syntax highlighting style and
3230 /// diagnostic status.
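///
/// # Example
///
/// A rough sketch (not run as a doctest), assuming a `snapshot:
/// BufferSnapshot`; reassembling the chunks reproduces the original text:
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a slice of text plus highlight and diagnostic
///     // information for that slice.
///     text.push_str(chunk.text);
/// }
/// assert_eq!(text, snapshot.text());
/// ```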
3231 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3232 let range = range.start.to_offset(self)..range.end.to_offset(self);
3233
3234 let mut syntax = None;
3235 if language_aware {
3236 syntax = Some(self.get_highlights(range.clone()));
3237 }
3238 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3239 let diagnostics = language_aware;
3240 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3241 }
3242
3243 pub fn highlighted_text_for_range<T: ToOffset>(
3244 &self,
3245 range: Range<T>,
3246 override_style: Option<HighlightStyle>,
3247 syntax_theme: &SyntaxTheme,
3248 ) -> HighlightedText {
3249 HighlightedText::from_buffer_range(
3250 range,
3251 &self.text,
3252 &self.syntax,
3253 override_style,
3254 syntax_theme,
3255 )
3256 }
3257
3258 /// Invokes the given callback for each line of text in the given range of the buffer.
3259 /// Uses callback to avoid allocating a string for each line.
3260 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3261 let mut line = String::new();
3262 let mut row = range.start.row;
3263 for chunk in self
3264 .as_rope()
3265 .chunks_in_range(range.to_offset(self))
3266 .chain(["\n"])
3267 {
3268 for (newline_ix, text) in chunk.split('\n').enumerate() {
3269 if newline_ix > 0 {
3270 callback(row, &line);
3271 row += 1;
3272 line.clear();
3273 }
3274 line.push_str(text);
3275 }
3276 }
3277 }
3278
3279 /// Iterates over every [`SyntaxLayer`] in the buffer.
3280 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3281 self.syntax
3282 .layers_for_range(0..self.len(), &self.text, true)
3283 }
3284
3285 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3286 let offset = position.to_offset(self);
3287 self.syntax
3288 .layers_for_range(offset..offset, &self.text, false)
3289 .filter(|l| l.node().end_byte() > offset)
3290 .last()
3291 }
3292
3293 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3294 &self,
3295 range: Range<D>,
3296 ) -> Option<SyntaxLayer<'_>> {
3297 let range = range.to_offset(self);
3298 return self
3299 .syntax
3300 .layers_for_range(range, &self.text, false)
3301 .max_by(|a, b| {
3302 if a.depth != b.depth {
3303 a.depth.cmp(&b.depth)
3304 } else if a.offset.0 != b.offset.0 {
3305 a.offset.0.cmp(&b.offset.0)
3306 } else {
3307 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3308 }
3309 });
3310 }
3311
3312 /// Returns the main [`Language`].
3313 pub fn language(&self) -> Option<&Arc<Language>> {
3314 self.language.as_ref()
3315 }
3316
3317 /// Returns the [`Language`] at the given location.
3318 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3319 self.syntax_layer_at(position)
3320 .map(|info| info.language)
3321 .or(self.language.as_ref())
3322 }
3323
3324 /// Returns the settings for the language at the given location.
3325 pub fn settings_at<'a, D: ToOffset>(
3326 &'a self,
3327 position: D,
3328 cx: &'a App,
3329 ) -> Cow<'a, LanguageSettings> {
3330 language_settings(
3331 self.language_at(position).map(|l| l.name()),
3332 self.file.as_ref(),
3333 cx,
3334 )
3335 }
3336
3337 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3338 CharClassifier::new(self.language_scope_at(point))
3339 }
3340
3341 /// Returns the [`LanguageScope`] at the given location.
3342 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3343 let offset = position.to_offset(self);
3344 let mut scope = None;
3345 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3346
3347 // Use the layer that has the smallest node intersecting the given point.
3348 for layer in self
3349 .syntax
3350 .layers_for_range(offset..offset, &self.text, false)
3351 {
3352 let mut cursor = layer.node().walk();
3353
3354 let mut range = None;
3355 loop {
3356 let child_range = cursor.node().byte_range();
3357 if !child_range.contains(&offset) {
3358 break;
3359 }
3360
3361 range = Some(child_range);
3362 if cursor.goto_first_child_for_byte(offset).is_none() {
3363 break;
3364 }
3365 }
3366
3367 if let Some(range) = range
3368 && smallest_range_and_depth.as_ref().map_or(
3369 true,
3370 |(smallest_range, smallest_range_depth)| {
3371 if layer.depth > *smallest_range_depth {
3372 true
3373 } else if layer.depth == *smallest_range_depth {
3374 range.len() < smallest_range.len()
3375 } else {
3376 false
3377 }
3378 },
3379 )
3380 {
3381 smallest_range_and_depth = Some((range, layer.depth));
3382 scope = Some(LanguageScope {
3383 language: layer.language.clone(),
3384 override_id: layer.override_id(offset, &self.text),
3385 });
3386 }
3387 }
3388
3389 scope.or_else(|| {
3390 self.language.clone().map(|language| LanguageScope {
3391 language,
3392 override_id: None,
3393 })
3394 })
3395 }
3396
3397 /// Returns a tuple of the range and character kind of the word
3398 /// surrounding the given position.
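///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `snapshot:
/// BufferSnapshot` and a `cursor_offset: usize`:
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(cursor_offset, false);
/// if kind == Some(CharKind::Word) {
///     // `range` spans the word under the cursor.
/// }
/// ```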
3399 pub fn surrounding_word<T: ToOffset>(
3400 &self,
3401 start: T,
3402 for_completion: bool,
3403 ) -> (Range<usize>, Option<CharKind>) {
3404 let mut start = start.to_offset(self);
3405 let mut end = start;
3406 let mut next_chars = self.chars_at(start).take(128).peekable();
3407 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3408
3409 let classifier = self
3410 .char_classifier_at(start)
3411 .for_completion(for_completion);
3412 let word_kind = cmp::max(
3413 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3414 next_chars.peek().copied().map(|c| classifier.kind(c)),
3415 );
3416
3417 for ch in prev_chars {
3418 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3419 start -= ch.len_utf8();
3420 } else {
3421 break;
3422 }
3423 }
3424
3425 for ch in next_chars {
3426 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3427 end += ch.len_utf8();
3428 } else {
3429 break;
3430 }
3431 }
3432
3433 (start..end, word_kind)
3434 }
3435
3436 /// Returns the closest syntax node enclosing the given range.
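///
/// # Example
///
/// A minimal sketch (not run as a doctest), assuming a `snapshot:
/// BufferSnapshot` and a selected byte range `selection: Range<usize>`:
///
/// ```ignore
/// // Expand a selection to the smallest syntax node that strictly contains it.
/// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
///     let expanded = node.byte_range();
/// }
/// ```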
3437 pub fn syntax_ancestor<'a, T: ToOffset>(
3438 &'a self,
3439 range: Range<T>,
3440 ) -> Option<tree_sitter::Node<'a>> {
3441 let range = range.start.to_offset(self)..range.end.to_offset(self);
3442 let mut result: Option<tree_sitter::Node<'a>> = None;
3443 'outer: for layer in self
3444 .syntax
3445 .layers_for_range(range.clone(), &self.text, true)
3446 {
3447 let mut cursor = layer.node().walk();
3448
3449 // Descend to the first leaf that touches the start of the range.
3450 //
3451 // If the range is non-empty and the current node ends exactly at the start,
3452 // move to the next sibling to find a node that extends beyond the start.
3453 //
3454 // If the range is empty and the current node starts after the range position,
3455 // move to the previous sibling to find the node that contains the position.
3456 while cursor.goto_first_child_for_byte(range.start).is_some() {
3457 if !range.is_empty() && cursor.node().end_byte() == range.start {
3458 cursor.goto_next_sibling();
3459 }
3460 if range.is_empty() && cursor.node().start_byte() > range.start {
3461 cursor.goto_previous_sibling();
3462 }
3463 }
3464
3465 // Ascend to the smallest ancestor that strictly contains the range.
3466 loop {
3467 let node_range = cursor.node().byte_range();
3468 if node_range.start <= range.start
3469 && node_range.end >= range.end
3470 && node_range.len() > range.len()
3471 {
3472 break;
3473 }
3474 if !cursor.goto_parent() {
3475 continue 'outer;
3476 }
3477 }
3478
3479 let left_node = cursor.node();
3480 let mut layer_result = left_node;
3481
3482 // For an empty range, try to find another node immediately to the right of the range.
3483 if left_node.end_byte() == range.start {
3484 let mut right_node = None;
3485 while !cursor.goto_next_sibling() {
3486 if !cursor.goto_parent() {
3487 break;
3488 }
3489 }
3490
3491 while cursor.node().start_byte() == range.start {
3492 right_node = Some(cursor.node());
3493 if !cursor.goto_first_child() {
3494 break;
3495 }
3496 }
3497
3498 // If there is a candidate node on both sides of the (empty) range, then
3499 // decide between the two by favoring a named node over an anonymous token.
3500 // If both nodes are the same in that regard, favor the right one.
3501 if let Some(right_node) = right_node
3502 && (right_node.is_named() || !left_node.is_named())
3503 {
3504 layer_result = right_node;
3505 }
3506 }
3507
3508 if let Some(previous_result) = &result
3509 && previous_result.byte_range().len() < layer_result.byte_range().len()
3510 {
3511 continue;
3512 }
3513 result = Some(layer_result);
3514 }
3515
3516 result
3517 }
3518
    /// Returns the root syntax node within the row containing the given position.
3520 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3521 let start_offset = position.to_offset(self);
3522
3523 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3524
3525 let layer = self
3526 .syntax
3527 .layers_for_range(start_offset..start_offset, &self.text, true)
3528 .next()?;
3529
3530 let mut cursor = layer.node().walk();
3531
3532 // Descend to the first leaf that touches the start of the range.
3533 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3534 if cursor.node().end_byte() == start_offset {
3535 cursor.goto_next_sibling();
3536 }
3537 }
3538
3539 // Ascend to the root node within the same row.
3540 while cursor.goto_parent() {
3541 if cursor.node().start_position().row != row {
3542 break;
3543 }
3544 }
3545
        Some(cursor.node())
3547 }
3548
3549 /// Returns the outline for the buffer.
3550 ///
3551 /// This method allows passing an optional [`SyntaxTheme`] to
3552 /// syntax-highlight the returned symbols.
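    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot` whose language has an outline
    /// query, and that [`Outline`] exposes its `items`):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```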
3553 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3554 self.outline_items_containing(0..self.len(), true, theme)
3555 .map(Outline::new)
3556 }
3557
3558 /// Returns all the symbols that contain the given position.
3559 ///
3560 /// This method allows passing an optional [`SyntaxTheme`] to
3561 /// syntax-highlight the returned symbols.
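    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`):
    ///
    /// ```ignore
    /// // Collect the names of all symbols enclosing offset 42, outermost first.
    /// let names: Vec<String> = snapshot
    ///     .symbols_containing(42, None)
    ///     .unwrap_or_default()
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect();
    /// ```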
3562 pub fn symbols_containing<T: ToOffset>(
3563 &self,
3564 position: T,
3565 theme: Option<&SyntaxTheme>,
3566 ) -> Option<Vec<OutlineItem<Anchor>>> {
3567 let position = position.to_offset(self);
3568 let mut items = self.outline_items_containing(
3569 position.saturating_sub(1)..self.len().min(position + 1),
3570 false,
3571 theme,
3572 )?;
3573 let mut prev_depth = None;
3574 items.retain(|item| {
3575 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3576 prev_depth = Some(item.depth);
3577 result
3578 });
3579 Some(items)
3580 }
3581
3582 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3583 let range = range.to_offset(self);
3584 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3585 grammar.outline_config.as_ref().map(|c| &c.query)
3586 });
3587 let configs = matches
3588 .grammars()
3589 .iter()
3590 .map(|g| g.outline_config.as_ref().unwrap())
3591 .collect::<Vec<_>>();
3592
3593 while let Some(mat) = matches.peek() {
3594 let config = &configs[mat.grammar_index];
3595 let containing_item_node = maybe!({
3596 let item_node = mat.captures.iter().find_map(|cap| {
3597 if cap.index == config.item_capture_ix {
3598 Some(cap.node)
3599 } else {
3600 None
3601 }
3602 })?;
3603
3604 let item_byte_range = item_node.byte_range();
3605 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3606 None
3607 } else {
3608 Some(item_node)
3609 }
3610 });
3611
3612 if let Some(item_node) = containing_item_node {
3613 return Some(
3614 Point::from_ts_point(item_node.start_position())
3615 ..Point::from_ts_point(item_node.end_position()),
3616 );
3617 }
3618
3619 matches.advance();
3620 }
3621 None
3622 }
3623
3624 pub fn outline_items_containing<T: ToOffset>(
3625 &self,
3626 range: Range<T>,
3627 include_extra_context: bool,
3628 theme: Option<&SyntaxTheme>,
3629 ) -> Option<Vec<OutlineItem<Anchor>>> {
3630 let range = range.to_offset(self);
3631 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3632 grammar.outline_config.as_ref().map(|c| &c.query)
3633 });
3634 let configs = matches
3635 .grammars()
3636 .iter()
3637 .map(|g| g.outline_config.as_ref().unwrap())
3638 .collect::<Vec<_>>();
3639
3640 let mut items = Vec::new();
3641 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3642 while let Some(mat) = matches.peek() {
3643 let config = &configs[mat.grammar_index];
3644 if let Some(item) =
3645 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3646 {
3647 items.push(item);
3648 } else if let Some(capture) = mat
3649 .captures
3650 .iter()
3651 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3652 {
3653 let capture_range = capture.node.start_position()..capture.node.end_position();
3654 let mut capture_row_range =
3655 capture_range.start.row as u32..capture_range.end.row as u32;
3656 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3657 {
3658 capture_row_range.end -= 1;
3659 }
3660 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3661 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3662 last_row_range.end = capture_row_range.end;
3663 } else {
3664 annotation_row_ranges.push(capture_row_range);
3665 }
3666 } else {
3667 annotation_row_ranges.push(capture_row_range);
3668 }
3669 }
3670 matches.advance();
3671 }
3672
3673 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3674
3675 // Assign depths based on containment relationships and convert to anchors.
3676 let mut item_ends_stack = Vec::<Point>::new();
3677 let mut anchor_items = Vec::new();
3678 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3679 for item in items {
3680 while let Some(last_end) = item_ends_stack.last().copied() {
3681 if last_end < item.range.end {
3682 item_ends_stack.pop();
3683 } else {
3684 break;
3685 }
3686 }
3687
3688 let mut annotation_row_range = None;
3689 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3690 let row_preceding_item = item.range.start.row.saturating_sub(1);
3691 if next_annotation_row_range.end < row_preceding_item {
3692 annotation_row_ranges.next();
3693 } else {
3694 if next_annotation_row_range.end == row_preceding_item {
3695 annotation_row_range = Some(next_annotation_row_range.clone());
3696 annotation_row_ranges.next();
3697 }
3698 break;
3699 }
3700 }
3701
3702 anchor_items.push(OutlineItem {
3703 depth: item_ends_stack.len(),
3704 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3705 text: item.text,
3706 highlight_ranges: item.highlight_ranges,
3707 name_ranges: item.name_ranges,
3708 body_range: item.body_range.map(|body_range| {
3709 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3710 }),
3711 annotation_range: annotation_row_range.map(|annotation_range| {
3712 self.anchor_after(Point::new(annotation_range.start, 0))
3713 ..self.anchor_before(Point::new(
3714 annotation_range.end,
3715 self.line_len(annotation_range.end),
3716 ))
3717 }),
3718 });
3719 item_ends_stack.push(item.range.end);
3720 }
3721
3722 Some(anchor_items)
3723 }
3724
3725 fn next_outline_item(
3726 &self,
3727 config: &OutlineConfig,
3728 mat: &SyntaxMapMatch,
3729 range: &Range<usize>,
3730 include_extra_context: bool,
3731 theme: Option<&SyntaxTheme>,
3732 ) -> Option<OutlineItem<Point>> {
3733 let item_node = mat.captures.iter().find_map(|cap| {
3734 if cap.index == config.item_capture_ix {
3735 Some(cap.node)
3736 } else {
3737 None
3738 }
3739 })?;
3740
3741 let item_byte_range = item_node.byte_range();
3742 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3743 return None;
3744 }
3745 let item_point_range = Point::from_ts_point(item_node.start_position())
3746 ..Point::from_ts_point(item_node.end_position());
3747
3748 let mut open_point = None;
3749 let mut close_point = None;
3750 let mut buffer_ranges = Vec::new();
3751 for capture in mat.captures {
3752 let node_is_name;
3753 if capture.index == config.name_capture_ix {
3754 node_is_name = true;
3755 } else if Some(capture.index) == config.context_capture_ix
3756 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3757 {
3758 node_is_name = false;
3759 } else {
3760 if Some(capture.index) == config.open_capture_ix {
3761 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3762 } else if Some(capture.index) == config.close_capture_ix {
3763 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3764 }
3765
3766 continue;
3767 }
3768
3769 let mut range = capture.node.start_byte()..capture.node.end_byte();
3770 let start = capture.node.start_position();
3771 if capture.node.end_position().row > start.row {
3772 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3773 }
3774
3775 if !range.is_empty() {
3776 buffer_ranges.push((range, node_is_name));
3777 }
3778 }
3779 if buffer_ranges.is_empty() {
3780 return None;
3781 }
3782 let mut text = String::new();
3783 let mut highlight_ranges = Vec::new();
3784 let mut name_ranges = Vec::new();
3785 let mut chunks = self.chunks(
3786 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3787 true,
3788 );
3789 let mut last_buffer_range_end = 0;
3790
3791 for (buffer_range, is_name) in buffer_ranges {
3792 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3793 if space_added {
3794 text.push(' ');
3795 }
3796 let before_append_len = text.len();
3797 let mut offset = buffer_range.start;
3798 chunks.seek(buffer_range.clone());
3799 for mut chunk in chunks.by_ref() {
3800 if chunk.text.len() > buffer_range.end - offset {
3801 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3802 offset = buffer_range.end;
3803 } else {
3804 offset += chunk.text.len();
3805 }
3806 let style = chunk
3807 .syntax_highlight_id
3808 .zip(theme)
3809 .and_then(|(highlight, theme)| highlight.style(theme));
3810 if let Some(style) = style {
3811 let start = text.len();
3812 let end = start + chunk.text.len();
3813 highlight_ranges.push((start..end, style));
3814 }
3815 text.push_str(chunk.text);
3816 if offset >= buffer_range.end {
3817 break;
3818 }
3819 }
3820 if is_name {
3821 let after_append_len = text.len();
3822 let start = if space_added && !name_ranges.is_empty() {
3823 before_append_len - 1
3824 } else {
3825 before_append_len
3826 };
3827 name_ranges.push(start..after_append_len);
3828 }
3829 last_buffer_range_end = buffer_range.end;
3830 }
3831
3832 Some(OutlineItem {
3833 depth: 0, // We'll calculate the depth later
3834 range: item_point_range,
3835 text,
3836 highlight_ranges,
3837 name_ranges,
3838 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3839 annotation_range: None,
3840 })
3841 }
3842
3843 pub fn function_body_fold_ranges<T: ToOffset>(
3844 &self,
3845 within: Range<T>,
3846 ) -> impl Iterator<Item = Range<usize>> + '_ {
3847 self.text_object_ranges(within, TreeSitterOptions::default())
3848 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3849 }
3850
3851 /// For each grammar in the language, runs the provided
3852 /// [`tree_sitter::Query`] against the given range.
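    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`), iterating matches of each
    /// grammar's outline query:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar: &Grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     println!("pattern {} in grammar {}", mat.pattern_index, mat.grammar_index);
    ///     matches.advance();
    /// }
    /// ```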
3853 pub fn matches(
3854 &self,
3855 range: Range<usize>,
3856 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3857 ) -> SyntaxMapMatches<'_> {
3858 self.syntax.matches(range, self, query)
3859 }
3860
3861 pub fn all_bracket_ranges(
3862 &self,
3863 range: Range<usize>,
3864 ) -> impl Iterator<Item = BracketMatch> + '_ {
3865 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3866 grammar.brackets_config.as_ref().map(|c| &c.query)
3867 });
3868 let configs = matches
3869 .grammars()
3870 .iter()
3871 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3872 .collect::<Vec<_>>();
3873
3874 iter::from_fn(move || {
3875 while let Some(mat) = matches.peek() {
3876 let mut open = None;
3877 let mut close = None;
3878 let config = &configs[mat.grammar_index];
3879 let pattern = &config.patterns[mat.pattern_index];
3880 for capture in mat.captures {
3881 if capture.index == config.open_capture_ix {
3882 open = Some(capture.node.byte_range());
3883 } else if capture.index == config.close_capture_ix {
3884 close = Some(capture.node.byte_range());
3885 }
3886 }
3887
3888 matches.advance();
3889
3890 let Some((open_range, close_range)) = open.zip(close) else {
3891 continue;
3892 };
3893
3894 let bracket_range = open_range.start..=close_range.end;
3895 if !bracket_range.overlaps(&range) {
3896 continue;
3897 }
3898
3899 return Some(BracketMatch {
3900 open_range,
3901 close_range,
3902 newline_only: pattern.newline_only,
3903 });
3904 }
3905 None
3906 })
3907 }
3908
    /// Returns bracket range pairs overlapping or adjacent to `range`.
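    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot` for the buffer text `"(a)"`):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(1..2) {
    ///     // For "(a)", the only pair is the parentheses at offsets 0..1 and 2..3.
    ///     assert_eq!(pair.open_range, 0..1);
    ///     assert_eq!(pair.close_range, 2..3);
    /// }
    /// ```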
3910 pub fn bracket_ranges<T: ToOffset>(
3911 &self,
3912 range: Range<T>,
3913 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one byte on each side so that bracket pairs adjacent
        // to the range are also matched.
3915 let range = range.start.to_offset(self).saturating_sub(1)
3916 ..self.len().min(range.end.to_offset(self) + 1);
3917 self.all_bracket_ranges(range)
3918 .filter(|pair| !pair.newline_only)
3919 }
3920
3921 pub fn debug_variables_query<T: ToOffset>(
3922 &self,
3923 range: Range<T>,
3924 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3925 let range = range.start.to_offset(self).saturating_sub(1)
3926 ..self.len().min(range.end.to_offset(self) + 1);
3927
3928 let mut matches = self.syntax.matches_with_options(
3929 range.clone(),
3930 &self.text,
3931 TreeSitterOptions::default(),
3932 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3933 );
3934
3935 let configs = matches
3936 .grammars()
3937 .iter()
3938 .map(|grammar| grammar.debug_variables_config.as_ref())
3939 .collect::<Vec<_>>();
3940
3941 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3942
3943 iter::from_fn(move || {
3944 loop {
3945 while let Some(capture) = captures.pop() {
3946 if capture.0.overlaps(&range) {
3947 return Some(capture);
3948 }
3949 }
3950
3951 let mat = matches.peek()?;
3952
3953 let Some(config) = configs[mat.grammar_index].as_ref() else {
3954 matches.advance();
3955 continue;
3956 };
3957
3958 for capture in mat.captures {
3959 let Some(ix) = config
3960 .objects_by_capture_ix
3961 .binary_search_by_key(&capture.index, |e| e.0)
3962 .ok()
3963 else {
3964 continue;
3965 };
3966 let text_object = config.objects_by_capture_ix[ix].1;
3967 let byte_range = capture.node.byte_range();
3968
3969 let mut found = false;
3970 for (range, existing) in captures.iter_mut() {
3971 if existing == &text_object {
3972 range.start = range.start.min(byte_range.start);
3973 range.end = range.end.max(byte_range.end);
3974 found = true;
3975 break;
3976 }
3977 }
3978
3979 if !found {
3980 captures.push((byte_range, text_object));
3981 }
3982 }
3983
3984 matches.advance();
3985 }
3986 })
3987 }
3988
3989 pub fn text_object_ranges<T: ToOffset>(
3990 &self,
3991 range: Range<T>,
3992 options: TreeSitterOptions,
3993 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3994 let range = range.start.to_offset(self).saturating_sub(1)
3995 ..self.len().min(range.end.to_offset(self) + 1);
3996
3997 let mut matches =
3998 self.syntax
3999 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4000 grammar.text_object_config.as_ref().map(|c| &c.query)
4001 });
4002
4003 let configs = matches
4004 .grammars()
4005 .iter()
4006 .map(|grammar| grammar.text_object_config.as_ref())
4007 .collect::<Vec<_>>();
4008
4009 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4010
4011 iter::from_fn(move || {
4012 loop {
4013 while let Some(capture) = captures.pop() {
4014 if capture.0.overlaps(&range) {
4015 return Some(capture);
4016 }
4017 }
4018
4019 let mat = matches.peek()?;
4020
4021 let Some(config) = configs[mat.grammar_index].as_ref() else {
4022 matches.advance();
4023 continue;
4024 };
4025
4026 for capture in mat.captures {
4027 let Some(ix) = config
4028 .text_objects_by_capture_ix
4029 .binary_search_by_key(&capture.index, |e| e.0)
4030 .ok()
4031 else {
4032 continue;
4033 };
4034 let text_object = config.text_objects_by_capture_ix[ix].1;
4035 let byte_range = capture.node.byte_range();
4036
4037 let mut found = false;
4038 for (range, existing) in captures.iter_mut() {
4039 if existing == &text_object {
4040 range.start = range.start.min(byte_range.start);
4041 range.end = range.end.max(byte_range.end);
4042 found = true;
4043 break;
4044 }
4045 }
4046
4047 if !found {
4048 captures.push((byte_range, text_object));
4049 }
4050 }
4051
4052 matches.advance();
4053 }
4054 })
4055 }
4056
    /// Returns enclosing bracket ranges containing the given range.
4058 pub fn enclosing_bracket_ranges<T: ToOffset>(
4059 &self,
4060 range: Range<T>,
4061 ) -> impl Iterator<Item = BracketMatch> + '_ {
4062 let range = range.start.to_offset(self)..range.end.to_offset(self);
4063
4064 self.bracket_ranges(range.clone()).filter(move |pair| {
4065 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4066 })
4067 }
4068
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
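    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot` for the buffer text `"{ (x) }"`):
    ///
    /// ```ignore
    /// // With no filter, the innermost pair around "x" is the parentheses.
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(3..4, None) {
    ///     assert_eq!(open, 2..3);
    ///     assert_eq!(close, 4..5);
    /// }
    /// ```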
4072 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4073 &self,
4074 range: Range<T>,
4075 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4076 ) -> Option<(Range<usize>, Range<usize>)> {
4077 let range = range.start.to_offset(self)..range.end.to_offset(self);
4078
4079 // Get the ranges of the innermost pair of brackets.
4080 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4081
4082 for pair in self.enclosing_bracket_ranges(range.clone()) {
4083 if let Some(range_filter) = range_filter
4084 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4085 {
4086 continue;
4087 }
4088
4089 let len = pair.close_range.end - pair.open_range.start;
4090
4091 if let Some((existing_open, existing_close)) = &result {
4092 let existing_len = existing_close.end - existing_open.start;
4093 if len > existing_len {
4094 continue;
4095 }
4096 }
4097
4098 result = Some((pair.open_range, pair.close_range));
4099 }
4100
4101 result
4102 }
4103
    /// Returns offset ranges for any matches of the redaction query.
4105 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4106 /// will be run on the relevant section of the buffer.
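    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot` whose language defines a
    /// redaction query):
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     println!("redact bytes {}..{}", range.start, range.end);
    /// }
    /// ```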
4107 pub fn redacted_ranges<T: ToOffset>(
4108 &self,
4109 range: Range<T>,
4110 ) -> impl Iterator<Item = Range<usize>> + '_ {
4111 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4112 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4113 grammar
4114 .redactions_config
4115 .as_ref()
4116 .map(|config| &config.query)
4117 });
4118
4119 let configs = syntax_matches
4120 .grammars()
4121 .iter()
4122 .map(|grammar| grammar.redactions_config.as_ref())
4123 .collect::<Vec<_>>();
4124
4125 iter::from_fn(move || {
4126 let redacted_range = syntax_matches
4127 .peek()
4128 .and_then(|mat| {
4129 configs[mat.grammar_index].and_then(|config| {
4130 mat.captures
4131 .iter()
4132 .find(|capture| capture.index == config.redaction_capture_ix)
4133 })
4134 })
4135 .map(|mat| mat.node.byte_range());
4136 syntax_matches.advance();
4137 redacted_range
4138 })
4139 }
4140
4141 pub fn injections_intersecting_range<T: ToOffset>(
4142 &self,
4143 range: Range<T>,
4144 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4145 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4146
4147 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4148 grammar
4149 .injection_config
4150 .as_ref()
4151 .map(|config| &config.query)
4152 });
4153
4154 let configs = syntax_matches
4155 .grammars()
4156 .iter()
4157 .map(|grammar| grammar.injection_config.as_ref())
4158 .collect::<Vec<_>>();
4159
4160 iter::from_fn(move || {
4161 let ranges = syntax_matches.peek().and_then(|mat| {
4162 let config = &configs[mat.grammar_index]?;
4163 let content_capture_range = mat.captures.iter().find_map(|capture| {
4164 if capture.index == config.content_capture_ix {
4165 Some(capture.node.byte_range())
4166 } else {
4167 None
4168 }
4169 })?;
4170 let language = self.language_at(content_capture_range.start)?;
4171 Some((content_capture_range, language))
4172 });
4173 syntax_matches.advance();
4174 ranges
4175 })
4176 }
4177
4178 pub fn runnable_ranges(
4179 &self,
4180 offset_range: Range<usize>,
4181 ) -> impl Iterator<Item = RunnableRange> + '_ {
4182 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4183 grammar.runnable_config.as_ref().map(|config| &config.query)
4184 });
4185
4186 let test_configs = syntax_matches
4187 .grammars()
4188 .iter()
4189 .map(|grammar| grammar.runnable_config.as_ref())
4190 .collect::<Vec<_>>();
4191
4192 iter::from_fn(move || {
4193 loop {
4194 let mat = syntax_matches.peek()?;
4195
4196 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4197 let mut run_range = None;
4198 let full_range = mat.captures.iter().fold(
4199 Range {
4200 start: usize::MAX,
4201 end: 0,
4202 },
4203 |mut acc, next| {
4204 let byte_range = next.node.byte_range();
4205 if acc.start > byte_range.start {
4206 acc.start = byte_range.start;
4207 }
4208 if acc.end < byte_range.end {
4209 acc.end = byte_range.end;
4210 }
4211 acc
4212 },
4213 );
4214 if full_range.start > full_range.end {
4215 // We did not find a full spanning range of this match.
4216 return None;
4217 }
4218 let extra_captures: SmallVec<[_; 1]> =
4219 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4220 test_configs
4221 .extra_captures
4222 .get(capture.index as usize)
4223 .cloned()
4224 .and_then(|tag_name| match tag_name {
4225 RunnableCapture::Named(name) => {
4226 Some((capture.node.byte_range(), name))
4227 }
4228 RunnableCapture::Run => {
4229 let _ = run_range.insert(capture.node.byte_range());
4230 None
4231 }
4232 })
4233 }));
4234 let run_range = run_range?;
4235 let tags = test_configs
4236 .query
4237 .property_settings(mat.pattern_index)
4238 .iter()
4239 .filter_map(|property| {
4240 if *property.key == *"tag" {
4241 property
4242 .value
4243 .as_ref()
4244 .map(|value| RunnableTag(value.to_string().into()))
4245 } else {
4246 None
4247 }
4248 })
4249 .collect();
4250 let extra_captures = extra_captures
4251 .into_iter()
4252 .map(|(range, name)| {
4253 (
4254 name.to_string(),
4255 self.text_for_range(range.clone()).collect::<String>(),
4256 )
4257 })
4258 .collect();
4259 // All tags should have the same range.
4260 Some(RunnableRange {
4261 run_range,
4262 full_range,
4263 runnable: Runnable {
4264 tags,
4265 language: mat.language,
4266 buffer: self.remote_id(),
4267 },
4268 extra_captures,
4269 buffer_id: self.remote_id(),
4270 })
4271 });
4272
4273 syntax_matches.advance();
4274 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. However, we don't want to
                    // return None from this iterator just because a match lacked a run marker, so in that
                    // case we loop around to the next match instead.
4277 return test_range;
4278 }
4279 }
4280 })
4281 }
4282
4283 /// Returns selections for remote peers intersecting the given range.
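    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     println!("replica {:?}: {} selections", replica_id, selections.count());
    /// }
    /// ```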
4284 #[allow(clippy::type_complexity)]
4285 pub fn selections_in_range(
4286 &self,
4287 range: Range<Anchor>,
4288 include_local: bool,
4289 ) -> impl Iterator<
4290 Item = (
4291 ReplicaId,
4292 bool,
4293 CursorShape,
4294 impl Iterator<Item = &Selection<Anchor>> + '_,
4295 ),
4296 > + '_ {
4297 self.remote_selections
4298 .iter()
4299 .filter(move |(replica_id, set)| {
4300 (include_local || **replica_id != self.text.replica_id())
4301 && !set.selections.is_empty()
4302 })
4303 .map(move |(replica_id, set)| {
4304 let start_ix = match set.selections.binary_search_by(|probe| {
4305 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4306 }) {
4307 Ok(ix) | Err(ix) => ix,
4308 };
4309 let end_ix = match set.selections.binary_search_by(|probe| {
4310 probe.start.cmp(&range.end, self).then(Ordering::Less)
4311 }) {
4312 Ok(ix) | Err(ix) => ix,
4313 };
4314
4315 (
4316 *replica_id,
4317 set.line_mode,
4318 set.cursor_shape,
4319 set.selections[start_ix..end_ix].iter(),
4320 )
4321 })
4322 }
4323
    /// Returns whether the buffer contains any diagnostics.
4325 pub fn has_diagnostics(&self) -> bool {
4326 !self.diagnostics.is_empty()
4327 }
4328
4329 /// Returns all the diagnostics intersecting the given range.
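    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`), resolving the entries to
    /// offset ranges:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<usize, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```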
4330 pub fn diagnostics_in_range<'a, T, O>(
4331 &'a self,
4332 search_range: Range<T>,
4333 reversed: bool,
4334 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4335 where
4336 T: 'a + Clone + ToOffset,
4337 O: 'a + FromAnchor,
4338 {
4339 let mut iterators: Vec<_> = self
4340 .diagnostics
4341 .iter()
4342 .map(|(_, collection)| {
4343 collection
4344 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4345 .peekable()
4346 })
4347 .collect();
4348
4349 std::iter::from_fn(move || {
4350 let (next_ix, _) = iterators
4351 .iter_mut()
4352 .enumerate()
4353 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4354 .min_by(|(_, a), (_, b)| {
4355 let cmp = a
4356 .range
4357 .start
4358 .cmp(&b.range.start, self)
4359 // when range is equal, sort by diagnostic severity
4360 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4361 // and stabilize order with group_id
4362 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4363 if reversed { cmp.reverse() } else { cmp }
4364 })?;
4365 iterators[next_ix]
4366 .next()
4367 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4368 diagnostic,
4369 range: FromAnchor::from_anchor(&range.start, self)
4370 ..FromAnchor::from_anchor(&range.end, self),
4371 })
4372 })
4373 }
4374
4375 /// Returns all the diagnostic groups associated with the given
4376 /// language server ID. If no language server ID is provided,
4377 /// all diagnostics groups are returned.
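    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`):
    ///
    /// ```ignore
    /// // Collect diagnostic groups from every language server.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```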
4378 pub fn diagnostic_groups(
4379 &self,
4380 language_server_id: Option<LanguageServerId>,
4381 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4382 let mut groups = Vec::new();
4383
4384 if let Some(language_server_id) = language_server_id {
4385 if let Ok(ix) = self
4386 .diagnostics
4387 .binary_search_by_key(&language_server_id, |e| e.0)
4388 {
4389 self.diagnostics[ix]
4390 .1
4391 .groups(language_server_id, &mut groups, self);
4392 }
4393 } else {
4394 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4395 diagnostics.groups(*language_server_id, &mut groups, self);
4396 }
4397 }
4398
4399 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4400 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4401 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4402 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4403 });
4404
4405 groups
4406 }
4407
4408 /// Returns an iterator over the diagnostics for the given group.
4409 pub fn diagnostic_group<O>(
4410 &self,
4411 group_id: usize,
4412 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4413 where
4414 O: FromAnchor + 'static,
4415 {
4416 self.diagnostics
4417 .iter()
4418 .flat_map(move |(_, set)| set.group(group_id, self))
4419 }
4420
4421 /// An integer version number that accounts for all updates besides
4422 /// the buffer's text itself (which is versioned via a version vector).
4423 pub fn non_text_state_update_count(&self) -> usize {
4424 self.non_text_state_update_count
4425 }
4426
4427 /// An integer version that changes when the buffer's syntax changes.
4428 pub fn syntax_update_count(&self) -> usize {
4429 self.syntax.update_count()
4430 }
4431
    /// Returns a snapshot of the underlying file.
4433 pub fn file(&self) -> Option<&Arc<dyn File>> {
4434 self.file.as_ref()
4435 }
4436
    /// Resolves the path of the underlying file, either as a full path including the worktree
    /// root name or relative to the worktree root, depending on `include_root`.
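    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot` and an [`App`] reference `cx`):
    ///
    /// ```ignore
    /// if let Some(path) = snapshot.resolve_file_path(cx, true) {
    ///     println!("{}", path.display());
    /// }
    /// ```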
4438 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4439 if let Some(file) = self.file() {
4440 if file.path().file_name().is_none() || include_root {
4441 Some(file.full_path(cx))
4442 } else {
4443 Some(file.path().to_path_buf())
4444 }
4445 } else {
4446 None
4447 }
4448 }
4449
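    /// Returns the distinct words in the given buffer range, keyed by their text and mapped to
    /// their anchor ranges, optionally filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// A minimal usage sketch (assuming a hypothetical `snapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _range) in &words {
    ///     println!("{word}");
    /// }
    /// ```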
4450 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4451 let query_str = query.fuzzy_contents;
4452 if query_str.map_or(false, |query| query.is_empty()) {
4453 return BTreeMap::default();
4454 }
4455
4456 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4457 language,
4458 override_id: None,
4459 }));
4460
4461 let mut query_ix = 0;
4462 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4463 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4464
4465 let mut words = BTreeMap::default();
4466 let mut current_word_start_ix = None;
4467 let mut chunk_ix = query.range.start;
4468 for chunk in self.chunks(query.range, false) {
4469 for (i, c) in chunk.text.char_indices() {
4470 let ix = chunk_ix + i;
4471 if classifier.is_word(c) {
4472 if current_word_start_ix.is_none() {
4473 current_word_start_ix = Some(ix);
4474 }
4475
4476 if let Some(query_chars) = &query_chars
4477 && query_ix < query_len
4478 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4479 {
4480 query_ix += 1;
4481 }
4482 continue;
4483 } else if let Some(word_start) = current_word_start_ix.take()
4484 && query_ix == query_len
4485 {
4486 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4487 let mut word_text = self.text_for_range(word_start..ix).peekable();
4488 let first_char = word_text
4489 .peek()
4490 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4492 if !query.skip_digits
4493 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4494 {
4495 words.insert(word_text.collect(), word_range);
4496 }
4497 }
4498 query_ix = 0;
4499 }
4500 chunk_ix += chunk.text.len();
4501 }
4502
4503 words
4504 }
4505}
4506
4507pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this fuzzy string, in order.
4509 pub fuzzy_contents: Option<&'a str>,
4510 /// Skips words that start with a digit.
4511 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4513 pub range: Range<usize>,
4514}
4515
4516fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4517 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4518}
4519
4520fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4521 let mut result = IndentSize::spaces(0);
4522 for c in text {
4523 let kind = match c {
4524 ' ' => IndentKind::Space,
4525 '\t' => IndentKind::Tab,
4526 _ => break,
4527 };
4528 if result.len == 0 {
4529 result.kind = kind;
4530 }
4531 result.len += 1;
4532 }
4533 result
4534}
4535
4536impl Clone for BufferSnapshot {
4537 fn clone(&self) -> Self {
4538 Self {
4539 text: self.text.clone(),
4540 syntax: self.syntax.clone(),
4541 file: self.file.clone(),
4542 remote_selections: self.remote_selections.clone(),
4543 diagnostics: self.diagnostics.clone(),
4544 language: self.language.clone(),
4545 non_text_state_update_count: self.non_text_state_update_count,
4546 }
4547 }
4548}
4549
4550impl Deref for BufferSnapshot {
4551 type Target = text::BufferSnapshot;
4552
4553 fn deref(&self) -> &Self::Target {
4554 &self.text
4555 }
4556}
4557
4558unsafe impl Send for BufferChunks<'_> {}
4559
4560impl<'a> BufferChunks<'a> {
4561 pub(crate) fn new(
4562 text: &'a Rope,
4563 range: Range<usize>,
4564 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4565 diagnostics: bool,
4566 buffer_snapshot: Option<&'a BufferSnapshot>,
4567 ) -> Self {
4568 let mut highlights = None;
4569 if let Some((captures, highlight_maps)) = syntax {
4570 highlights = Some(BufferChunkHighlights {
4571 captures,
4572 next_capture: None,
4573 stack: Default::default(),
4574 highlight_maps,
4575 })
4576 }
4577
4578 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4579 let chunks = text.chunks_in_range(range.clone());
4580
4581 let mut this = BufferChunks {
4582 range,
4583 buffer_snapshot,
4584 chunks,
4585 diagnostic_endpoints,
4586 error_depth: 0,
4587 warning_depth: 0,
4588 information_depth: 0,
4589 hint_depth: 0,
4590 unnecessary_depth: 0,
4591 underline: true,
4592 highlights,
4593 };
4594 this.initialize_diagnostic_endpoints();
4595 this
4596 }
4597
    /// Seeks to the given byte range in the buffer.
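    ///
    /// A minimal usage sketch (assuming a hypothetical `chunks: BufferChunks` obtained from a snapshot):
    ///
    /// ```ignore
    /// // Re-position the iterator over bytes 10..20 and collect the corresponding text.
    /// chunks.seek(10..20);
    /// let text: String = chunks.by_ref().map(|chunk| chunk.text).collect();
    /// ```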
4599 pub fn seek(&mut self, range: Range<usize>) {
4600 let old_range = std::mem::replace(&mut self.range, range.clone());
4601 self.chunks.set_range(self.range.clone());
4602 if let Some(highlights) = self.highlights.as_mut() {
4603 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4604 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4605 highlights
4606 .stack
4607 .retain(|(end_offset, _)| *end_offset > range.start);
4608 if let Some(capture) = &highlights.next_capture
4609 && range.start >= capture.node.start_byte()
4610 {
4611 let next_capture_end = capture.node.end_byte();
4612 if range.start < next_capture_end {
4613 highlights.stack.push((
4614 next_capture_end,
4615 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4616 ));
4617 }
4618 highlights.next_capture.take();
4619 }
4620 } else if let Some(snapshot) = self.buffer_snapshot {
4621 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4622 *highlights = BufferChunkHighlights {
4623 captures,
4624 next_capture: None,
4625 stack: Default::default(),
4626 highlight_maps,
4627 };
4628 } else {
4629 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4630 // Seeking such BufferChunks is not supported.
4631 debug_assert!(
4632 false,
4633 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4634 );
4635 }
4636
4637 highlights.captures.set_byte_range(self.range.clone());
4638 self.initialize_diagnostic_endpoints();
4639 }
4640 }
4641
4642 fn initialize_diagnostic_endpoints(&mut self) {
4643 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4644 && let Some(buffer) = self.buffer_snapshot
4645 {
4646 let mut diagnostic_endpoints = Vec::new();
4647 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4648 diagnostic_endpoints.push(DiagnosticEndpoint {
4649 offset: entry.range.start,
4650 is_start: true,
4651 severity: entry.diagnostic.severity,
4652 is_unnecessary: entry.diagnostic.is_unnecessary,
4653 underline: entry.diagnostic.underline,
4654 });
4655 diagnostic_endpoints.push(DiagnosticEndpoint {
4656 offset: entry.range.end,
4657 is_start: false,
4658 severity: entry.diagnostic.severity,
4659 is_unnecessary: entry.diagnostic.is_unnecessary,
4660 underline: entry.diagnostic.underline,
4661 });
4662 }
4663 diagnostic_endpoints
4664 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4665 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4666 self.hint_depth = 0;
4667 self.error_depth = 0;
4668 self.warning_depth = 0;
4669 self.information_depth = 0;
4670 }
4671 }
4672
4673 /// The current byte offset in the buffer.
4674 pub fn offset(&self) -> usize {
4675 self.range.start
4676 }
4677
4678 pub fn range(&self) -> Range<usize> {
4679 self.range.clone()
4680 }
4681
4682 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4683 let depth = match endpoint.severity {
4684 DiagnosticSeverity::ERROR => &mut self.error_depth,
4685 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4686 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4687 DiagnosticSeverity::HINT => &mut self.hint_depth,
4688 _ => return,
4689 };
4690 if endpoint.is_start {
4691 *depth += 1;
4692 } else {
4693 *depth -= 1;
4694 }
4695
4696 if endpoint.is_unnecessary {
4697 if endpoint.is_start {
4698 self.unnecessary_depth += 1;
4699 } else {
4700 self.unnecessary_depth -= 1;
4701 }
4702 }
4703 }
4704
4705 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4706 if self.error_depth > 0 {
4707 Some(DiagnosticSeverity::ERROR)
4708 } else if self.warning_depth > 0 {
4709 Some(DiagnosticSeverity::WARNING)
4710 } else if self.information_depth > 0 {
4711 Some(DiagnosticSeverity::INFORMATION)
4712 } else if self.hint_depth > 0 {
4713 Some(DiagnosticSeverity::HINT)
4714 } else {
4715 None
4716 }
4717 }
4718
4719 fn current_code_is_unnecessary(&self) -> bool {
4720 self.unnecessary_depth > 0
4721 }
4722}
4723
4724impl<'a> Iterator for BufferChunks<'a> {
4725 type Item = Chunk<'a>;
4726
4727 fn next(&mut self) -> Option<Self::Item> {
4728 let mut next_capture_start = usize::MAX;
4729 let mut next_diagnostic_endpoint = usize::MAX;
4730
4731 if let Some(highlights) = self.highlights.as_mut() {
4732 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4733 if *parent_capture_end <= self.range.start {
4734 highlights.stack.pop();
4735 } else {
4736 break;
4737 }
4738 }
4739
4740 if highlights.next_capture.is_none() {
4741 highlights.next_capture = highlights.captures.next();
4742 }
4743
4744 while let Some(capture) = highlights.next_capture.as_ref() {
4745 if self.range.start < capture.node.start_byte() {
4746 next_capture_start = capture.node.start_byte();
4747 break;
4748 } else {
4749 let highlight_id =
4750 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4751 highlights
4752 .stack
4753 .push((capture.node.end_byte(), highlight_id));
4754 highlights.next_capture = highlights.captures.next();
4755 }
4756 }
4757 }
4758
4759 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4760 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4761 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4762 if endpoint.offset <= self.range.start {
4763 self.update_diagnostic_depths(endpoint);
4764 diagnostic_endpoints.next();
4765 self.underline = endpoint.underline;
4766 } else {
4767 next_diagnostic_endpoint = endpoint.offset;
4768 break;
4769 }
4770 }
4771 }
4772 self.diagnostic_endpoints = diagnostic_endpoints;
4773
4774 if let Some(chunk) = self.chunks.peek() {
4775 let chunk_start = self.range.start;
4776 let mut chunk_end = (self.chunks.offset() + chunk.len())
4777 .min(next_capture_start)
4778 .min(next_diagnostic_endpoint);
4779 let mut highlight_id = None;
4780 if let Some(highlights) = self.highlights.as_ref()
4781 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4782 {
4783 chunk_end = chunk_end.min(*parent_capture_end);
4784 highlight_id = Some(*parent_highlight_id);
4785 }
4786
4787 let slice =
4788 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4789 self.range.start = chunk_end;
4790 if self.range.start == self.chunks.offset() + chunk.len() {
4791 self.chunks.next().unwrap();
4792 }
4793
4794 Some(Chunk {
4795 text: slice,
4796 syntax_highlight_id: highlight_id,
4797 underline: self.underline,
4798 diagnostic_severity: self.current_diagnostic_severity(),
4799 is_unnecessary: self.current_code_is_unnecessary(),
4800 ..Chunk::default()
4801 })
4802 } else {
4803 None
4804 }
4805 }
4806}
4807
4808impl operation_queue::Operation for Operation {
4809 fn lamport_timestamp(&self) -> clock::Lamport {
4810 match self {
4811 Operation::Buffer(_) => {
4812 unreachable!("buffer operations should never be deferred at this layer")
4813 }
4814 Operation::UpdateDiagnostics {
4815 lamport_timestamp, ..
4816 }
4817 | Operation::UpdateSelections {
4818 lamport_timestamp, ..
4819 }
4820 | Operation::UpdateCompletionTriggers {
4821 lamport_timestamp, ..
4822 } => *lamport_timestamp,
4823 }
4824 }
4825}
4826
4827impl Default for Diagnostic {
4828 fn default() -> Self {
4829 Self {
4830 source: Default::default(),
4831 source_kind: DiagnosticSourceKind::Other,
4832 code: None,
4833 code_description: None,
4834 severity: DiagnosticSeverity::ERROR,
4835 message: Default::default(),
4836 markdown: None,
4837 group_id: 0,
4838 is_primary: false,
4839 is_disk_based: false,
4840 is_unnecessary: false,
4841 underline: true,
4842 data: None,
4843 }
4844 }
4845}
4846
4847impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4849 pub fn spaces(len: u32) -> Self {
4850 Self {
4851 len,
4852 kind: IndentKind::Space,
4853 }
4854 }
4855
4856 /// Returns an [`IndentSize`] representing a tab.
4857 pub fn tab() -> Self {
4858 Self {
4859 len: 1,
4860 kind: IndentKind::Tab,
4861 }
4862 }
4863
4864 /// An iterator over the characters represented by this [`IndentSize`].
4865 pub fn chars(&self) -> impl Iterator<Item = char> {
4866 iter::repeat(self.char()).take(self.len as usize)
4867 }
4868
4869 /// The character representation of this [`IndentSize`].
4870 pub fn char(&self) -> char {
4871 match self.kind {
4872 IndentKind::Space => ' ',
4873 IndentKind::Tab => '\t',
4874 }
4875 }
4876
4877 /// Consumes the current [`IndentSize`] and returns a new one that has
4878 /// been shrunk or enlarged by the given size along the given direction.
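    ///
    /// A small worked sketch of the delta arithmetic:
    ///
    /// ```ignore
    /// // Growing a 4-space indent by another 4 spaces yields 8 spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    ///
    /// // Shrinking a single tab by one tab yields an empty indent.
    /// let indent = IndentSize::tab().with_delta(Ordering::Less, IndentSize::tab());
    /// assert_eq!(indent.len, 0);
    /// ```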
4879 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4880 match direction {
4881 Ordering::Less => {
4882 if self.kind == size.kind && self.len >= size.len {
4883 self.len -= size.len;
4884 }
4885 }
4886 Ordering::Equal => {}
4887 Ordering::Greater => {
4888 if self.len == 0 {
4889 self = size;
4890 } else if self.kind == size.kind {
4891 self.len += size.len;
4892 }
4893 }
4894 }
4895 self
4896 }
4897
4898 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4899 match self.kind {
4900 IndentKind::Space => self.len as usize,
4901 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4902 }
4903 }
4904}
4905
4906#[cfg(any(test, feature = "test-support"))]
4907pub struct TestFile {
4908 pub path: Arc<Path>,
4909 pub root_name: String,
4910 pub local_root: Option<PathBuf>,
4911}
4912
4913#[cfg(any(test, feature = "test-support"))]
4914impl File for TestFile {
4915 fn path(&self) -> &Arc<Path> {
4916 &self.path
4917 }
4918
4919 fn full_path(&self, _: &gpui::App) -> PathBuf {
4920 PathBuf::from(&self.root_name).join(self.path.as_ref())
4921 }
4922
4923 fn as_local(&self) -> Option<&dyn LocalFile> {
4924 if self.local_root.is_some() {
4925 Some(self)
4926 } else {
4927 None
4928 }
4929 }
4930
4931 fn disk_state(&self) -> DiskState {
4932 unimplemented!()
4933 }
4934
4935 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4936 self.path().file_name().unwrap_or(self.root_name.as_ref())
4937 }
4938
4939 fn worktree_id(&self, _: &App) -> WorktreeId {
4940 WorktreeId::from_usize(0)
4941 }
4942
4943 fn to_proto(&self, _: &App) -> rpc::proto::File {
4944 unimplemented!()
4945 }
4946
4947 fn is_private(&self) -> bool {
4948 false
4949 }
4950}
4951
4952#[cfg(any(test, feature = "test-support"))]
4953impl LocalFile for TestFile {
4954 fn abs_path(&self, _cx: &App) -> PathBuf {
4955 PathBuf::from(self.local_root.as_ref().unwrap())
4956 .join(&self.root_name)
4957 .join(self.path.as_ref())
4958 }
4959
4960 fn load(&self, _cx: &App) -> Task<Result<String>> {
4961 unimplemented!()
4962 }
4963
4964 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4965 unimplemented!()
4966 }
4967}
4968
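/// Groups an iterator of values into ranges of consecutive values, splitting any range that
/// would exceed `max_len`.
///
/// A small worked sketch:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```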
4969pub(crate) fn contiguous_ranges(
4970 values: impl Iterator<Item = u32>,
4971 max_len: usize,
4972) -> impl Iterator<Item = Range<u32>> {
4973 let mut values = values;
4974 let mut current_range: Option<Range<u32>> = None;
4975 std::iter::from_fn(move || {
4976 loop {
4977 if let Some(value) = values.next() {
4978 if let Some(range) = &mut current_range
4979 && value == range.end
4980 && range.len() < max_len
4981 {
4982 range.end += 1;
4983 continue;
4984 }
4985
4986 let prev_range = current_range.clone();
4987 current_range = Some(value..(value + 1));
4988 if prev_range.is_some() {
4989 return prev_range;
4990 }
4991 } else {
4992 return current_range.take();
4993 }
4994 }
4995 })
4996}
4997
4998#[derive(Default, Debug)]
4999pub struct CharClassifier {
5000 scope: Option<LanguageScope>,
5001 for_completion: bool,
5002 ignore_punctuation: bool,
5003}
5004
5005impl CharClassifier {
5006 pub fn new(scope: Option<LanguageScope>) -> Self {
5007 Self {
5008 scope,
5009 for_completion: false,
5010 ignore_punctuation: false,
5011 }
5012 }
5013
5014 pub fn for_completion(self, for_completion: bool) -> Self {
5015 Self {
5016 for_completion,
5017 ..self
5018 }
5019 }
5020
5021 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5022 Self {
5023 ignore_punctuation,
5024 ..self
5025 }
5026 }
5027
5028 pub fn is_whitespace(&self, c: char) -> bool {
5029 self.kind(c) == CharKind::Whitespace
5030 }
5031
5032 pub fn is_word(&self, c: char) -> bool {
5033 self.kind(c) == CharKind::Word
5034 }
5035
5036 pub fn is_punctuation(&self, c: char) -> bool {
5037 self.kind(c) == CharKind::Punctuation
5038 }
5039
5040 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5041 if c.is_alphanumeric() || c == '_' {
5042 return CharKind::Word;
5043 }
5044
5045 if let Some(scope) = &self.scope {
5046 let characters = if self.for_completion {
5047 scope.completion_query_characters()
5048 } else {
5049 scope.word_characters()
5050 };
5051 if let Some(characters) = characters
5052 && characters.contains(&c)
5053 {
5054 return CharKind::Word;
5055 }
5056 }
5057
5058 if c.is_whitespace() {
5059 return CharKind::Whitespace;
5060 }
5061
5062 if ignore_punctuation {
5063 CharKind::Word
5064 } else {
5065 CharKind::Punctuation
5066 }
5067 }
5068
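    /// Classifies a character using this classifier's `ignore_punctuation` setting.
    ///
    /// A minimal usage sketch:
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind('a'), CharKind::Word);
    /// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
    /// assert_eq!(classifier.kind('+'), CharKind::Punctuation);
    /// ```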
5069 pub fn kind(&self, c: char) -> CharKind {
5070 self.kind_with(c, self.ignore_punctuation)
5071 }
5072}
5073
5074/// Find all of the ranges of whitespace that occur at the ends of lines
5075/// in the given rope.
5076///
5077/// This could also be done with a regex search, but this implementation
5078/// avoids copying text.
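///
/// A small worked sketch (assuming `Rope` implements `From<&str>`, as in this crate's text layer):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```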
5079pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5080 let mut ranges = Vec::new();
5081
5082 let mut offset = 0;
5083 let mut prev_chunk_trailing_whitespace_range = 0..0;
5084 for chunk in rope.chunks() {
5085 let mut prev_line_trailing_whitespace_range = 0..0;
5086 for (i, line) in chunk.split('\n').enumerate() {
5087 let line_end_offset = offset + line.len();
5088 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5089 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5090
5091 if i == 0 && trimmed_line_len == 0 {
5092 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5093 }
5094 if !prev_line_trailing_whitespace_range.is_empty() {
5095 ranges.push(prev_line_trailing_whitespace_range);
5096 }
5097
5098 offset = line_end_offset + 1;
5099 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5100 }
5101
5102 offset -= 1;
5103 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5104 }
5105
5106 if !prev_chunk_trailing_whitespace_range.is_empty() {
5107 ranges.push(prev_chunk_trailing_whitespace_range);
5108 }
5109
5110 ranges
5111}