1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// The human-readable message, in Markdown format.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
232 /// Quickly distinguishes diagnostic groups by their source.
233 pub source_kind: DiagnosticSourceKind,
234 /// Data from the language server that produced this diagnostic. Passed back to the language server when code actions are requested for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
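///
/// A minimal sketch of how another entity might observe these events via a
/// gpui subscription (illustrative only; the observing entity and the closure
/// body are assumptions, not part of this crate):
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| match event {
///     BufferEvent::Edited => { /* e.g. refresh UI or schedule a save */ }
///     BufferEvent::Saved => { /* e.g. clear a "dirty" indicator */ }
///     _ => {}
/// })
/// .detach();
/// ```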
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
306 /// The buffer is in need of a reload
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
356 /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
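///
/// A hedged sketch of how UI code might branch on this state (the function is
/// hypothetical, not part of this crate):
///
/// ```ignore
/// fn shows_deletion_indicator(state: DiskState) -> bool {
///     // `New` files were never on disk, so no deletion indicator is shown.
///     matches!(state, DiskState::Deleted)
/// }
/// ```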
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
395 /// Returns the absolute path of this file
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
422 /// If the original indent column is `a`, and the first line of the insertion
423 /// is then auto-indented to column `b`, then every other line of
424 /// the insertion will have its indentation adjusted by `b - a` columns.
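 ///
 /// For example (illustrative numbers): if a block was copied from indent
 /// column 2 (`a = 2`) and its first line is auto-indented to column 6
 /// (`b = 6`), every subsequent line is shifted right by `b - a = 4` columns,
 /// preserving the block's internal relative indentation.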
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
497 /// Whether this chunk of text is an inlay.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region of the buffer that can be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
574 /// Returns the first line, with leading whitespace trimmed unless a highlight
575 /// begins within it, along with a boolean indicating whether more lines follow.
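 ///
 /// A minimal usage sketch (illustrative; `highlighted` is an assumed
 /// `HighlightedText` value):
 ///
 /// ```ignore
 /// let (preview, has_more_lines) = highlighted.first_line_preview();
 /// // `preview` holds only the first line, with highlight ranges re-based to it;
 /// // `has_more_lines` is true when the original text continued past that line.
 /// ```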
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 &syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
727 &current_snapshot.text,
728 &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 &syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 &syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 &syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
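 ///
 /// A minimal sketch of constructing a local buffer as a gpui entity
 /// (assumes a context `cx` that can create entities, e.g. in a test):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
 /// ```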
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
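 ///
 /// A hedged sketch of combining this with [`Buffer::local`] (assumes
 /// `rust_language: Arc<Language>` was obtained elsewhere, e.g. from a
 /// language registry):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| {
 ///     Buffer::local("fn main() {}", cx).with_language(rust_language.clone(), cx)
 /// });
 /// ```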
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
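 ///
 /// A minimal sketch of reading from a snapshot off the main thread
 /// (illustrative; assumes `buffer: Entity<Buffer>` and an `App` context):
 ///
 /// ```ignore
 /// let snapshot = buffer.read(cx).snapshot();
 /// cx.background_spawn(async move {
 ///     // The snapshot is immutable, so read-only queries are safe here.
 ///     let indent = snapshot.indent_size_for_line(0);
 ///     // ...
 /// })
 /// .detach();
 /// ```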
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
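 ///
 /// A minimal sketch (illustrative; assumes `branch: Entity<Buffer>` was
 /// created with [`Buffer::branch`]):
 ///
 /// ```ignore
 /// branch.update(cx, |branch, cx| {
 ///     // An empty `ranges` vector means "merge all changes".
 ///     branch.merge_into_base(Vec::new(), cx);
 /// });
 /// ```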
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation {
1162 if let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169 }
1170
1171 fn on_base_buffer_event(
1172 &mut self,
1173 _: Entity<Buffer>,
1174 event: &BufferEvent,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let BufferEvent::Operation { operation, .. } = event else {
1178 return;
1179 };
1180 let Some(BufferBranchState {
1181 merged_operations, ..
1182 }) = &mut self.branch_state
1183 else {
1184 return;
1185 };
1186
1187 let mut operation_to_undo = None;
1188 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1189 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193 }
1194
1195 self.apply_ops([operation.clone()], cx);
1196
1197 if let Some(timestamp) = operation_to_undo {
1198 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1199 self.undo_operations(counts, cx);
1200 }
1201 }
1202
1203 #[cfg(test)]
1204 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1205 &self.text
1206 }
1207
1208 /// Retrieve a snapshot of the buffer's raw text, without any
1209 /// language-related state like the syntax tree or diagnostics.
1210 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1211 self.text.snapshot()
1212 }
1213
1214 /// The file associated with the buffer, if any.
1215 pub fn file(&self) -> Option<&Arc<dyn File>> {
1216 self.file.as_ref()
1217 }
1218
1219 /// The version of the buffer that was last saved or reloaded from disk.
1220 pub fn saved_version(&self) -> &clock::Global {
1221 &self.saved_version
1222 }
1223
1224 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1225 pub fn saved_mtime(&self) -> Option<MTime> {
1226 self.saved_mtime
1227 }
1228
1229 /// Assign a language to the buffer.
1230 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1231 self.non_text_state_update_count += 1;
1232 self.syntax_map.lock().clear(&self.text);
1233 self.language = language;
1234 self.was_changed();
1235 self.reparse(cx);
1236 cx.emit(BufferEvent::LanguageChanged);
1237 }
1238
1239 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1240 /// other languages if parts of the buffer are written in different languages.
1241 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1242 self.syntax_map
1243 .lock()
1244 .set_language_registry(language_registry);
1245 }
1246
1247 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1248 self.syntax_map.lock().language_registry()
1249 }
1250
1251 /// Assign the buffer a new [`Capability`].
1252 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1253 self.capability = capability;
1254 cx.emit(BufferEvent::CapabilityChanged)
1255 }
1256
1257 /// This method is called to signal that the buffer has been saved.
1258 pub fn did_save(
1259 &mut self,
1260 version: clock::Global,
1261 mtime: Option<MTime>,
1262 cx: &mut Context<Self>,
1263 ) {
1264 self.saved_version = version;
1265 self.has_unsaved_edits
1266 .set((self.saved_version().clone(), false));
1267 self.has_conflict = false;
1268 self.saved_mtime = mtime;
1269 self.was_changed();
1270 cx.emit(BufferEvent::Saved);
1271 cx.notify();
1272 }
1273
1274 /// This method is called to signal that the buffer has been discarded.
1275 pub fn discarded(&self, cx: &mut Context<Self>) {
1276 cx.emit(BufferEvent::Discarded);
1277 cx.notify();
1278 }
1279
1280 /// Reloads the contents of the buffer from disk.
1281 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1282 let (tx, rx) = futures::channel::oneshot::channel();
1283 let prev_version = self.text.version();
1284 self.reload_task = Some(cx.spawn(async move |this, cx| {
1285 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1286 let file = this.file.as_ref()?.as_local()?;
1287
1288 Some((file.disk_state().mtime(), file.load(cx)))
1289 })?
1290 else {
1291 return Ok(());
1292 };
1293
1294 let new_text = new_text.await?;
1295 let diff = this
1296 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1297 .await;
1298 this.update(cx, |this, cx| {
1299 if this.version() == diff.base_version {
1300 this.finalize_last_transaction();
1301 this.apply_diff(diff, cx);
1302 tx.send(this.finalize_last_transaction().cloned()).ok();
1303 this.has_conflict = false;
1304 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1305 } else {
1306 if !diff.edits.is_empty()
1307 || this
1308 .edits_since::<usize>(&diff.base_version)
1309 .next()
1310 .is_some()
1311 {
1312 this.has_conflict = true;
1313 }
1314
1315 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1316 }
1317
1318 this.reload_task.take();
1319 })
1320 }));
1321 rx
1322 }
1323
1324 /// This method is called to signal that the buffer has been reloaded.
1325 pub fn did_reload(
1326 &mut self,
1327 version: clock::Global,
1328 line_ending: LineEnding,
1329 mtime: Option<MTime>,
1330 cx: &mut Context<Self>,
1331 ) {
1332 self.saved_version = version;
1333 self.has_unsaved_edits
1334 .set((self.saved_version.clone(), false));
1335 self.text.set_line_ending(line_ending);
1336 self.saved_mtime = mtime;
1337 cx.emit(BufferEvent::Reloaded);
1338 cx.notify();
1339 }
1340
1341 /// Updates the [`File`] backing this buffer. This should be called when
1342 /// the file has changed or has been deleted.
1343 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1344 let was_dirty = self.is_dirty();
1345 let mut file_changed = false;
1346
1347 if let Some(old_file) = self.file.as_ref() {
1348 if new_file.path() != old_file.path() {
1349 file_changed = true;
1350 }
1351
1352 let old_state = old_file.disk_state();
1353 let new_state = new_file.disk_state();
1354 if old_state != new_state {
1355 file_changed = true;
1356 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1357 cx.emit(BufferEvent::ReloadNeeded)
1358 }
1359 }
1360 } else {
1361 file_changed = true;
1362 };
1363
1364 self.file = Some(new_file);
1365 if file_changed {
1366 self.was_changed();
1367 self.non_text_state_update_count += 1;
1368 if was_dirty != self.is_dirty() {
1369 cx.emit(BufferEvent::DirtyChanged);
1370 }
1371 cx.emit(BufferEvent::FileHandleChanged);
1372 cx.notify();
1373 }
1374 }
1375
1376 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1377 Some(self.branch_state.as_ref()?.base_buffer.clone())
1378 }
1379
1380 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1381 pub fn language(&self) -> Option<&Arc<Language>> {
1382 self.language.as_ref()
1383 }
1384
1385 /// Returns the [`Language`] at the given location.
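 ///
 /// A hedged sketch (illustrative): in a buffer with injected languages, such
 /// as a Markdown document containing a fenced Rust block, the innermost
 /// matching syntax layer wins:
 ///
 /// ```ignore
 /// let language = buffer.language_at(offset_inside_code_fence);
 /// // Falls back to the buffer's primary language when no injection layer
 /// // covers the given position.
 /// ```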
1386 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1387 let offset = position.to_offset(self);
1388 let mut is_first = true;
1389 let start_anchor = self.anchor_before(offset);
1390 let end_anchor = self.anchor_after(offset);
1391 self.syntax_map
1392 .lock()
1393 .layers_for_range(offset..offset, &self.text, false)
1394 .filter(|layer| {
1395 if is_first {
1396 is_first = false;
1397 return true;
1398 }
1399 let any_sub_ranges_contain_range = layer
1400 .included_sub_ranges
1401 .map(|sub_ranges| {
1402 sub_ranges.iter().any(|sub_range| {
1403 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1404 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1405 !is_before_start && !is_after_end
1406 })
1407 })
1408 .unwrap_or(true);
1409 any_sub_ranges_contain_range
1411 })
1412 .last()
1413 .map(|info| info.language.clone())
1414 .or_else(|| self.language.clone())
1415 }
1416
1417 /// Returns each [`Language`] for the active syntax layers at the given location.
1418 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1419 let offset = position.to_offset(self);
1420 let mut languages: Vec<Arc<Language>> = self
1421 .syntax_map
1422 .lock()
1423 .layers_for_range(offset..offset, &self.text, false)
1424 .map(|info| info.language.clone())
1425 .collect();
1426
1427 if languages.is_empty() {
1428 if let Some(buffer_language) = self.language() {
1429 languages.push(buffer_language.clone());
1430 }
1431 }
1432
1433 languages
1434 }
1435
1436 /// An integer version number that accounts for all updates besides
1437 /// the buffer's text itself (which is versioned via a version vector).
1438 pub fn non_text_state_update_count(&self) -> usize {
1439 self.non_text_state_update_count
1440 }
1441
1442 /// Whether the buffer is being parsed in the background.
1443 #[cfg(any(test, feature = "test-support"))]
1444 pub fn is_parsing(&self) -> bool {
1445 self.reparse.is_some()
1446 }
1447
1448 /// Indicates whether the buffer contains any regions that may be
1449 /// written in a language that hasn't been loaded yet.
1450 pub fn contains_unknown_injections(&self) -> bool {
1451 self.syntax_map.lock().contains_unknown_injections()
1452 }
1453
1454 #[cfg(any(test, feature = "test-support"))]
1455 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1456 self.sync_parse_timeout = timeout;
1457 }
1458
1459 /// Called after an edit to synchronize the buffer's main parse tree with
1460 /// the buffer's new underlying state.
1461 ///
1462 /// Locks the syntax map and interpolates the edits since the last reparse
1463 /// into the foreground syntax tree.
1464 ///
1465 /// Then takes a stable snapshot of the syntax map before unlocking it.
1466 /// The snapshot with the interpolated edits is sent to a background thread,
1467 /// where we ask Tree-sitter to perform an incremental parse.
1468 ///
1469 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1470 /// waiting for the parse to complete. If it finishes within that window, we
1471 /// proceed synchronously with the newly parsed tree.
1472 ///
1473 /// If we time out waiting on the parse, we spawn a second task that waits for
1474 /// the parse to complete, and return with the interpolated tree still in
1475 /// the foreground. When the background parse completes, it calls back into
1476 /// the main thread and assigns the result to the foreground parse state.
1477 ///
1478 /// If the buffer or grammar changed since the start of the background parse,
1479 /// initiate an additional reparse recursively. To avoid concurrent parses
1480 /// for the same buffer, we only initiate a new parse if we are not already
1481 /// parsing in the background.
1482 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1483 if self.reparse.is_some() {
1484 return;
1485 }
1486 let language = if let Some(language) = self.language.clone() {
1487 language
1488 } else {
1489 return;
1490 };
1491
1492 let text = self.text_snapshot();
1493 let parsed_version = self.version();
1494
1495 let mut syntax_map = self.syntax_map.lock();
1496 syntax_map.interpolate(&text);
1497 let language_registry = syntax_map.language_registry();
1498 let mut syntax_snapshot = syntax_map.snapshot();
1499 drop(syntax_map);
1500
1501 let parse_task = cx.background_spawn({
1502 let language = language.clone();
1503 let language_registry = language_registry.clone();
1504 async move {
1505 syntax_snapshot.reparse(&text, language_registry, language);
1506 syntax_snapshot
1507 }
1508 });
1509
1510 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1511 match cx
1512 .background_executor()
1513 .block_with_timeout(self.sync_parse_timeout, parse_task)
1514 {
1515 Ok(new_syntax_snapshot) => {
1516 self.did_finish_parsing(new_syntax_snapshot, cx);
1517 self.reparse = None;
1518 }
1519 Err(parse_task) => {
1520 self.reparse = Some(cx.spawn(async move |this, cx| {
1521 let new_syntax_map = parse_task.await;
1522 this.update(cx, move |this, cx| {
1523 let grammar_changed =
1524 this.language.as_ref().map_or(true, |current_language| {
1525 !Arc::ptr_eq(&language, current_language)
1526 });
1527 let language_registry_changed = new_syntax_map
1528 .contains_unknown_injections()
1529 && language_registry.map_or(false, |registry| {
1530 registry.version() != new_syntax_map.language_registry_version()
1531 });
1532 let parse_again = language_registry_changed
1533 || grammar_changed
1534 || this.version.changed_since(&parsed_version);
1535 this.did_finish_parsing(new_syntax_map, cx);
1536 this.reparse = None;
1537 if parse_again {
1538 this.reparse(cx);
1539 }
1540 })
1541 .ok();
1542 }));
1543 }
1544 }
1545 }
1546
1547 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1548 self.was_changed();
1549 self.non_text_state_update_count += 1;
1550 self.syntax_map.lock().did_parse(syntax_snapshot);
1551 self.request_autoindent(cx);
1552 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1553 cx.emit(BufferEvent::Reparsed);
1554 cx.notify();
1555 }
1556
1557 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1558 self.parse_status.1.clone()
1559 }
1560
1561 /// Assign to the buffer a set of diagnostics created by a given language server.
1562 pub fn update_diagnostics(
1563 &mut self,
1564 server_id: LanguageServerId,
1565 diagnostics: DiagnosticSet,
1566 cx: &mut Context<Self>,
1567 ) {
1568 let lamport_timestamp = self.text.lamport_clock.tick();
1569 let op = Operation::UpdateDiagnostics {
1570 server_id,
1571 diagnostics: diagnostics.iter().cloned().collect(),
1572 lamport_timestamp,
1573 };
1574 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1575 self.send_operation(op, true, cx);
1576 }
1577
1578 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1579 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1580 return None;
1581 };
1582 Some(&self.diagnostics[idx].1)
1583 }
1584
1585 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1586 if let Some(indent_sizes) = self.compute_autoindents() {
1587 let indent_sizes = cx.background_spawn(indent_sizes);
1588 match cx
1589 .background_executor()
1590 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1591 {
1592 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1593 Err(indent_sizes) => {
1594 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1595 let indent_sizes = indent_sizes.await;
1596 this.update(cx, |this, cx| {
1597 this.apply_autoindents(indent_sizes, cx);
1598 })
1599 .ok();
1600 }));
1601 }
1602 }
1603 } else {
1604 self.autoindent_requests.clear();
1605 for tx in self.wait_for_autoindent_txs.drain(..) {
1606 tx.send(()).ok();
1607 }
1608 }
1609 }
1610
1611 fn compute_autoindents(
1612 &self,
1613 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1614 let max_rows_between_yields = 100;
1615 let snapshot = self.snapshot();
1616 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1617 return None;
1618 }
1619
1620 let autoindent_requests = self.autoindent_requests.clone();
1621 Some(async move {
1622 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1623 for request in autoindent_requests {
1624 // Resolve each edited range to its row in the current buffer and in the
1625 // buffer before this batch of edits.
1626 let mut row_ranges = Vec::new();
1627 let mut old_to_new_rows = BTreeMap::new();
1628 let mut language_indent_sizes_by_new_row = Vec::new();
1629 for entry in &request.entries {
1630 let position = entry.range.start;
1631 let new_row = position.to_point(&snapshot).row;
1632 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1633 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1634
1635 if !entry.first_line_is_new {
1636 let old_row = position.to_point(&request.before_edit).row;
1637 old_to_new_rows.insert(old_row, new_row);
1638 }
1639 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1640 }
1641
1642 // Build a map containing the suggested indentation for each of the edited lines
1643 // with respect to the state of the buffer before these edits. This map is keyed
1644 // by the rows for these lines in the current state of the buffer.
1645 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1646 let old_edited_ranges =
1647 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1648 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1649 let mut language_indent_size = IndentSize::default();
1650 for old_edited_range in old_edited_ranges {
1651 let suggestions = request
1652 .before_edit
1653 .suggest_autoindents(old_edited_range.clone())
1654 .into_iter()
1655 .flatten();
1656 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1657 if let Some(suggestion) = suggestion {
1658 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1659
1660 // Find the indent size based on the language for this row.
1661 while let Some((row, size)) = language_indent_sizes.peek() {
1662 if *row > new_row {
1663 break;
1664 }
1665 language_indent_size = *size;
1666 language_indent_sizes.next();
1667 }
1668
1669 let suggested_indent = old_to_new_rows
1670 .get(&suggestion.basis_row)
1671 .and_then(|from_row| {
1672 Some(old_suggestions.get(from_row).copied()?.0)
1673 })
1674 .unwrap_or_else(|| {
1675 request
1676 .before_edit
1677 .indent_size_for_line(suggestion.basis_row)
1678 })
1679 .with_delta(suggestion.delta, language_indent_size);
1680 old_suggestions
1681 .insert(new_row, (suggested_indent, suggestion.within_error));
1682 }
1683 }
1684 yield_now().await;
1685 }
1686
1687 // Compute new suggestions for each line, but only include them in the result
1688 // if they differ from the old suggestion for that line.
1689 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1690 let mut language_indent_size = IndentSize::default();
1691 for (row_range, original_indent_column) in row_ranges {
1692 let new_edited_row_range = if request.is_block_mode {
1693 row_range.start..row_range.start + 1
1694 } else {
1695 row_range.clone()
1696 };
1697
1698 let suggestions = snapshot
1699 .suggest_autoindents(new_edited_row_range.clone())
1700 .into_iter()
1701 .flatten();
1702 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1703 if let Some(suggestion) = suggestion {
1704 // Find the indent size based on the language for this row.
1705 while let Some((row, size)) = language_indent_sizes.peek() {
1706 if *row > new_row {
1707 break;
1708 }
1709 language_indent_size = *size;
1710 language_indent_sizes.next();
1711 }
1712
1713 let suggested_indent = indent_sizes
1714 .get(&suggestion.basis_row)
1715 .copied()
1716 .map(|e| e.0)
1717 .unwrap_or_else(|| {
1718 snapshot.indent_size_for_line(suggestion.basis_row)
1719 })
1720 .with_delta(suggestion.delta, language_indent_size);
1721
1722 if old_suggestions.get(&new_row).map_or(
1723 true,
1724 |(old_indentation, was_within_error)| {
1725 suggested_indent != *old_indentation
1726 && (!suggestion.within_error || *was_within_error)
1727 },
1728 ) {
1729 indent_sizes.insert(
1730 new_row,
1731 (suggested_indent, request.ignore_empty_lines),
1732 );
1733 }
1734 }
1735 }
1736
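                    // In block mode, preserve the pasted text's relative indentation: compute the
                    // delta applied to the first line and shift every subsequent line by the same amount.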
1737 if let (true, Some(original_indent_column)) =
1738 (request.is_block_mode, original_indent_column)
1739 {
1740 let new_indent =
1741 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1742 *indent
1743 } else {
1744 snapshot.indent_size_for_line(row_range.start)
1745 };
1746 let delta = new_indent.len as i64 - original_indent_column as i64;
1747 if delta != 0 {
1748 for row in row_range.skip(1) {
1749 indent_sizes.entry(row).or_insert_with(|| {
1750 let mut size = snapshot.indent_size_for_line(row);
1751 if size.kind == new_indent.kind {
1752 match delta.cmp(&0) {
1753 Ordering::Greater => size.len += delta as u32,
1754 Ordering::Less => {
1755 size.len = size.len.saturating_sub(-delta as u32)
1756 }
1757 Ordering::Equal => {}
1758 }
1759 }
1760 (size, request.ignore_empty_lines)
1761 });
1762 }
1763 }
1764 }
1765
1766 yield_now().await;
1767 }
1768 }
1769
1770 indent_sizes
1771 .into_iter()
1772 .filter_map(|(row, (indent, ignore_empty_lines))| {
1773 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1774 None
1775 } else {
1776 Some((row, indent))
1777 }
1778 })
1779 .collect()
1780 })
1781 }
1782
1783 fn apply_autoindents(
1784 &mut self,
1785 indent_sizes: BTreeMap<u32, IndentSize>,
1786 cx: &mut Context<Self>,
1787 ) {
1788 self.autoindent_requests.clear();
1789 for tx in self.wait_for_autoindent_txs.drain(..) {
1790 tx.send(()).ok();
1791 }
1792
1793 let edits: Vec<_> = indent_sizes
1794 .into_iter()
1795 .filter_map(|(row, indent_size)| {
1796 let current_size = indent_size_for_line(self, row);
1797 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1798 })
1799 .collect();
1800
1801 let preserve_preview = self.preserve_preview();
1802 self.edit(edits, None, cx);
1803 if preserve_preview {
1804 self.refresh_preview();
1805 }
1806 }
1807
    /// Creates a minimal edit that will cause the given row to be indented
1809 /// with the given size. After applying this edit, the length of the line
1810 /// will always be at least `new_size.len`.
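    ///
    /// # Example
    ///
    /// A minimal illustrative sketch (not compiled as a doctest): growing a line's
    /// indentation from two to four spaces yields an insertion at the line start.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     0,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Two spaces are inserted at Point::new(0, 0).
    /// assert_eq!(edit, Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string())));
    /// ```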
1811 pub fn edit_for_indent_size_adjustment(
1812 row: u32,
1813 current_size: IndentSize,
1814 new_size: IndentSize,
1815 ) -> Option<(Range<Point>, String)> {
1816 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1818 Ordering::Greater => {
1819 let point = Point::new(row, 0);
1820 Some((
1821 point..point,
1822 iter::repeat(new_size.char())
1823 .take((new_size.len - current_size.len) as usize)
1824 .collect::<String>(),
1825 ))
1826 }
1827
1828 Ordering::Less => Some((
1829 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1830 String::new(),
1831 )),
1832
1833 Ordering::Equal => None,
1834 }
1835 } else {
1836 Some((
1837 Point::new(row, 0)..Point::new(row, current_size.len),
1838 iter::repeat(new_size.char())
1839 .take(new_size.len as usize)
1840 .collect::<String>(),
1841 ))
1842 }
1843 }
1844
1845 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1846 /// and the given new text.
1847 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1848 let old_text = self.as_rope().clone();
1849 let base_version = self.version();
1850 cx.background_executor()
1851 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1852 let old_text = old_text.to_string();
1853 let line_ending = LineEnding::detect(&new_text);
1854 LineEnding::normalize(&mut new_text);
1855 let edits = text_diff(&old_text, &new_text);
1856 Diff {
1857 base_version,
1858 line_ending,
1859 edits,
1860 }
1861 })
1862 }
1863
1864 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1866 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1867 let old_text = self.as_rope().clone();
1868 let line_ending = self.line_ending();
1869 let base_version = self.version();
1870 cx.background_spawn(async move {
1871 let ranges = trailing_whitespace_ranges(&old_text);
1872 let empty = Arc::<str>::from("");
1873 Diff {
1874 base_version,
1875 line_ending,
1876 edits: ranges
1877 .into_iter()
1878 .map(|range| (range, empty.clone()))
1879 .collect(),
1880 }
1881 })
1882 }
1883
1884 /// Ensures that the buffer ends with a single newline character, and
1885 /// no other whitespace. Skips if the buffer is empty.
1886 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1887 let len = self.len();
1888 if len == 0 {
1889 return;
1890 }
1891 let mut offset = len;
1892 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1893 let non_whitespace_len = chunk
1894 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1895 .len();
1896 offset -= chunk.len();
1897 offset += non_whitespace_len;
1898 if non_whitespace_len != 0 {
1899 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1900 return;
1901 }
1902 break;
1903 }
1904 }
1905 self.edit([(offset..len, "\n")], None, cx);
1906 }
1907
1908 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1909 /// calculated, then adjust the diff to account for those changes, and discard any
1910 /// parts of the diff that conflict with those changes.
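    ///
    /// An illustrative sketch (not compiled as a doctest), assuming `diff` was produced by
    /// awaiting the task returned from [`Buffer::diff`]:
    ///
    /// ```ignore
    /// if let Some(transaction_id) = buffer.apply_diff(diff, cx) {
    ///     // The hunks that survived conflict resolution were applied as one undoable transaction.
    /// }
    /// ```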
1911 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1912 let snapshot = self.snapshot();
1913 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1914 let mut delta = 0;
1915 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1916 while let Some(edit_since) = edits_since.peek() {
1917 // If the edit occurs after a diff hunk, then it does not
1918 // affect that hunk.
1919 if edit_since.old.start > range.end {
1920 break;
1921 }
1922 // If the edit precedes the diff hunk, then adjust the hunk
1923 // to reflect the edit.
1924 else if edit_since.old.end < range.start {
1925 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1926 edits_since.next();
1927 }
1928 // If the edit intersects a diff hunk, then discard that hunk.
1929 else {
1930 return None;
1931 }
1932 }
1933
1934 let start = (range.start as i64 + delta) as usize;
1935 let end = (range.end as i64 + delta) as usize;
1936 Some((start..end, new_text))
1937 });
1938
1939 self.start_transaction();
1940 self.text.set_line_ending(diff.line_ending);
1941 self.edit(adjusted_edits, None, cx);
1942 self.end_transaction(cx)
1943 }
1944
1945 fn has_unsaved_edits(&self) -> bool {
1946 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1947
1948 if last_version == self.version {
1949 self.has_unsaved_edits
1950 .set((last_version, has_unsaved_edits));
1951 return has_unsaved_edits;
1952 }
1953
1954 let has_edits = self.has_edits_since(&self.saved_version);
1955 self.has_unsaved_edits
1956 .set((self.version.clone(), has_edits));
1957 has_edits
1958 }
1959
1960 /// Checks if the buffer has unsaved changes.
1961 pub fn is_dirty(&self) -> bool {
1962 if self.capability == Capability::ReadOnly {
1963 return false;
1964 }
1965 if self.has_conflict {
1966 return true;
1967 }
1968 match self.file.as_ref().map(|f| f.disk_state()) {
1969 Some(DiskState::New) | Some(DiskState::Deleted) => {
1970 !self.is_empty() && self.has_unsaved_edits()
1971 }
1972 _ => self.has_unsaved_edits(),
1973 }
1974 }
1975
1976 /// Checks if the buffer and its file have both changed since the buffer
1977 /// was last saved or reloaded.
1978 pub fn has_conflict(&self) -> bool {
1979 if self.has_conflict {
1980 return true;
1981 }
1982 let Some(file) = self.file.as_ref() else {
1983 return false;
1984 };
1985 match file.disk_state() {
1986 DiskState::New => false,
1987 DiskState::Present { mtime } => match self.saved_mtime {
1988 Some(saved_mtime) => {
1989 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1990 }
1991 None => true,
1992 },
1993 DiskState::Deleted => false,
1994 }
1995 }
1996
1997 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1998 pub fn subscribe(&mut self) -> Subscription {
1999 self.text.subscribe()
2000 }
2001
2002 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2003 ///
2004 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
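    ///
    /// A minimal illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ... after some edits ...
    /// if changed.get() {
    ///     // The buffer's text has changed since the bit was registered.
    /// }
    /// ```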
2006 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2007 if let Err(ix) = self
2008 .change_bits
2009 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2010 {
2011 self.change_bits.insert(ix, bit);
2012 }
2013 }
2014
2015 fn was_changed(&mut self) {
2016 self.change_bits.retain(|change_bit| {
2017 change_bit.upgrade().map_or(false, |bit| {
2018 bit.replace(true);
2019 true
2020 })
2021 });
2022 }
2023
2024 /// Starts a transaction, if one is not already in-progress. When undoing or
2025 /// redoing edits, all of the edits performed within a transaction are undone
2026 /// or redone together.
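    ///
    /// An illustrative sketch (not compiled as a doctest): both edits below are undone together.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```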
2027 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2028 self.start_transaction_at(Instant::now())
2029 }
2030
2031 /// Starts a transaction, providing the current time. Subsequent transactions
2032 /// that occur within a short period of time will be grouped together. This
2033 /// is controlled by the buffer's undo grouping duration.
2034 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2035 self.transaction_depth += 1;
2036 if self.was_dirty_before_starting_transaction.is_none() {
2037 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2038 }
2039 self.text.start_transaction_at(now)
2040 }
2041
2042 /// Terminates the current transaction, if this is the outermost transaction.
2043 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2044 self.end_transaction_at(Instant::now(), cx)
2045 }
2046
2047 /// Terminates the current transaction, providing the current time. Subsequent transactions
2048 /// that occur within a short period of time will be grouped together. This
2049 /// is controlled by the buffer's undo grouping duration.
2050 pub fn end_transaction_at(
2051 &mut self,
2052 now: Instant,
2053 cx: &mut Context<Self>,
2054 ) -> Option<TransactionId> {
2055 assert!(self.transaction_depth > 0);
2056 self.transaction_depth -= 1;
2057 let was_dirty = if self.transaction_depth == 0 {
2058 self.was_dirty_before_starting_transaction.take().unwrap()
2059 } else {
2060 false
2061 };
2062 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2063 self.did_edit(&start_version, was_dirty, cx);
2064 Some(transaction_id)
2065 } else {
2066 None
2067 }
2068 }
2069
2070 /// Manually add a transaction to the buffer's undo history.
2071 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2072 self.text.push_transaction(transaction, now);
2073 }
2074
2075 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2077 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2078 self.text.finalize_last_transaction()
2079 }
2080
2081 /// Manually group all changes since a given transaction.
2082 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2083 self.text.group_until_transaction(transaction_id);
2084 }
2085
    /// Manually remove a transaction from the buffer's undo history.
2087 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2088 self.text.forget_transaction(transaction_id)
2089 }
2090
    /// Retrieve a transaction from the buffer's undo history.
2092 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2093 self.text.get_transaction(transaction_id)
2094 }
2095
2096 /// Manually merge two transactions in the buffer's undo history.
2097 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2098 self.text.merge_transactions(transaction, destination);
2099 }
2100
2101 /// Waits for the buffer to receive operations with the given timestamps.
2102 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2103 &mut self,
2104 edit_ids: It,
2105 ) -> impl Future<Output = Result<()>> + use<It> {
2106 self.text.wait_for_edits(edit_ids)
2107 }
2108
2109 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2110 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2111 &mut self,
2112 anchors: It,
2113 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2114 self.text.wait_for_anchors(anchors)
2115 }
2116
2117 /// Waits for the buffer to receive operations up to the given version.
2118 pub fn wait_for_version(
2119 &mut self,
2120 version: clock::Global,
2121 ) -> impl Future<Output = Result<()>> + use<> {
2122 self.text.wait_for_version(version)
2123 }
2124
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2127 pub fn give_up_waiting(&mut self) {
2128 self.text.give_up_waiting();
2129 }
2130
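    /// Returns a receiver that resolves once any pending auto-indent requests have been
    /// applied or abandoned, or `None` if there is no pending auto-indent work.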
2131 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2132 let mut rx = None;
2133 if !self.autoindent_requests.is_empty() {
2134 let channel = oneshot::channel();
2135 self.wait_for_autoindent_txs.push(channel.0);
2136 rx = Some(channel.1);
2137 }
2138 rx
2139 }
2140
2141 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2142 pub fn set_active_selections(
2143 &mut self,
2144 selections: Arc<[Selection<Anchor>]>,
2145 line_mode: bool,
2146 cursor_shape: CursorShape,
2147 cx: &mut Context<Self>,
2148 ) {
2149 let lamport_timestamp = self.text.lamport_clock.tick();
2150 self.remote_selections.insert(
2151 self.text.replica_id(),
2152 SelectionSet {
2153 selections: selections.clone(),
2154 lamport_timestamp,
2155 line_mode,
2156 cursor_shape,
2157 },
2158 );
2159 self.send_operation(
2160 Operation::UpdateSelections {
2161 selections,
2162 line_mode,
2163 lamport_timestamp,
2164 cursor_shape,
2165 },
2166 true,
2167 cx,
2168 );
2169 self.non_text_state_update_count += 1;
2170 cx.notify();
2171 }
2172
2173 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2174 /// this replica.
2175 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2176 if self
2177 .remote_selections
2178 .get(&self.text.replica_id())
2179 .map_or(true, |set| !set.selections.is_empty())
2180 {
2181 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2182 }
2183 }
2184
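    /// Stores the agent's selections under the reserved agent replica id so they can be
    /// rendered alongside collaborators' selections. Unlike [`Buffer::set_active_selections`],
    /// this does not broadcast an operation to other replicas.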
2185 pub fn set_agent_selections(
2186 &mut self,
2187 selections: Arc<[Selection<Anchor>]>,
2188 line_mode: bool,
2189 cursor_shape: CursorShape,
2190 cx: &mut Context<Self>,
2191 ) {
2192 let lamport_timestamp = self.text.lamport_clock.tick();
2193 self.remote_selections.insert(
2194 AGENT_REPLICA_ID,
2195 SelectionSet {
2196 selections: selections.clone(),
2197 lamport_timestamp,
2198 line_mode,
2199 cursor_shape,
2200 },
2201 );
2202 self.non_text_state_update_count += 1;
2203 cx.notify();
2204 }
2205
2206 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2207 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2208 }
2209
2210 /// Replaces the buffer's entire text.
2211 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2212 where
2213 T: Into<Arc<str>>,
2214 {
2215 self.autoindent_requests.clear();
2216 self.edit([(0..self.len(), text)], None, cx)
2217 }
2218
2219 /// Appends the given text to the end of the buffer.
2220 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2221 where
2222 T: Into<Arc<str>>,
2223 {
2224 self.edit([(self.len()..self.len(), text)], None, cx)
2225 }
2226
2227 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2228 /// delete, and a string of text to insert at that location.
2229 ///
2230 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2231 /// request for the edited ranges, which will be processed when the buffer finishes
2232 /// parsing.
2233 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
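    ///
    /// An illustrative sketch (not compiled as a doctest): replace the first three bytes
    /// and auto-indent each edited line.
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "if a {\n    b\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```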
2236 pub fn edit<I, S, T>(
2237 &mut self,
2238 edits_iter: I,
2239 autoindent_mode: Option<AutoindentMode>,
2240 cx: &mut Context<Self>,
2241 ) -> Option<clock::Lamport>
2242 where
2243 I: IntoIterator<Item = (Range<S>, T)>,
2244 S: ToOffset,
2245 T: Into<Arc<str>>,
2246 {
2247 // Skip invalid edits and coalesce contiguous ones.
2248 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2249
2250 for (range, new_text) in edits_iter {
2251 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2252
2253 if range.start > range.end {
2254 mem::swap(&mut range.start, &mut range.end);
2255 }
2256 let new_text = new_text.into();
2257 if !new_text.is_empty() || !range.is_empty() {
2258 if let Some((prev_range, prev_text)) = edits.last_mut() {
2259 if prev_range.end >= range.start {
2260 prev_range.end = cmp::max(prev_range.end, range.end);
2261 *prev_text = format!("{prev_text}{new_text}").into();
2262 } else {
2263 edits.push((range, new_text));
2264 }
2265 } else {
2266 edits.push((range, new_text));
2267 }
2268 }
2269 }
2270 if edits.is_empty() {
2271 return None;
2272 }
2273
2274 self.start_transaction();
2275 self.pending_autoindent.take();
2276 let autoindent_request = autoindent_mode
2277 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2278
2279 let edit_operation = self.text.edit(edits.iter().cloned());
2280 let edit_id = edit_operation.timestamp();
2281
2282 if let Some((before_edit, mode)) = autoindent_request {
2283 let mut delta = 0isize;
2284 let entries = edits
2285 .into_iter()
2286 .enumerate()
2287 .zip(&edit_operation.as_edit().unwrap().new_text)
2288 .map(|((ix, (range, _)), new_text)| {
2289 let new_text_length = new_text.len();
2290 let old_start = range.start.to_point(&before_edit);
2291 let new_start = (delta + range.start as isize) as usize;
2292 let range_len = range.end - range.start;
2293 delta += new_text_length as isize - range_len as isize;
2294
2295 // Decide what range of the insertion to auto-indent, and whether
2296 // the first line of the insertion should be considered a newly-inserted line
2297 // or an edit to an existing line.
2298 let mut range_of_insertion_to_indent = 0..new_text_length;
2299 let mut first_line_is_new = true;
2300
2301 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2302 let old_line_end = before_edit.line_len(old_start.row);
2303
2304 if old_start.column > old_line_start {
2305 first_line_is_new = false;
2306 }
2307
2308 if !new_text.contains('\n')
2309 && (old_start.column + (range_len as u32) < old_line_end
2310 || old_line_end == old_line_start)
2311 {
2312 first_line_is_new = false;
2313 }
2314
2315 // When inserting text starting with a newline, avoid auto-indenting the
2316 // previous line.
2317 if new_text.starts_with('\n') {
2318 range_of_insertion_to_indent.start += 1;
2319 first_line_is_new = true;
2320 }
2321
2322 let mut original_indent_column = None;
2323 if let AutoindentMode::Block {
2324 original_indent_columns,
2325 } = &mode
2326 {
2327 original_indent_column = Some(if new_text.starts_with('\n') {
2328 indent_size_for_text(
2329 new_text[range_of_insertion_to_indent.clone()].chars(),
2330 )
2331 .len
2332 } else {
2333 original_indent_columns
2334 .get(ix)
2335 .copied()
2336 .flatten()
2337 .unwrap_or_else(|| {
2338 indent_size_for_text(
2339 new_text[range_of_insertion_to_indent.clone()].chars(),
2340 )
2341 .len
2342 })
2343 });
2344
2345 // Avoid auto-indenting the line after the edit.
2346 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2347 range_of_insertion_to_indent.end -= 1;
2348 }
2349 }
2350
2351 AutoindentRequestEntry {
2352 first_line_is_new,
2353 original_indent_column,
2354 indent_size: before_edit.language_indent_size_at(range.start, cx),
2355 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2356 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2357 }
2358 })
2359 .collect();
2360
2361 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2362 before_edit,
2363 entries,
2364 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2365 ignore_empty_lines: false,
2366 }));
2367 }
2368
2369 self.end_transaction(cx);
2370 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2371 Some(edit_id)
2372 }
2373
2374 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2375 self.was_changed();
2376
2377 if self.edits_since::<usize>(old_version).next().is_none() {
2378 return;
2379 }
2380
2381 self.reparse(cx);
2382 cx.emit(BufferEvent::Edited);
2383 if was_dirty != self.is_dirty() {
2384 cx.emit(BufferEvent::DirtyChanged);
2385 }
2386 cx.notify();
2387 }
2388
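    /// Requests auto-indentation for the given ranges without performing any text edits,
    /// using the current contents as the "before" snapshot and skipping empty lines.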
2389 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2390 where
2391 I: IntoIterator<Item = Range<T>>,
2392 T: ToOffset + Copy,
2393 {
2394 let before_edit = self.snapshot();
2395 let entries = ranges
2396 .into_iter()
2397 .map(|range| AutoindentRequestEntry {
2398 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2399 first_line_is_new: true,
2400 indent_size: before_edit.language_indent_size_at(range.start, cx),
2401 original_indent_column: None,
2402 })
2403 .collect();
2404 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2405 before_edit,
2406 entries,
2407 is_block_mode: false,
2408 ignore_empty_lines: true,
2409 }));
2410 self.request_autoindent(cx);
2411 }
2412
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2415 pub fn insert_empty_line(
2416 &mut self,
2417 position: impl ToPoint,
2418 space_above: bool,
2419 space_below: bool,
2420 cx: &mut Context<Self>,
2421 ) -> Point {
2422 let mut position = position.to_point(self);
2423
2424 self.start_transaction();
2425
2426 self.edit(
2427 [(position..position, "\n")],
2428 Some(AutoindentMode::EachLine),
2429 cx,
2430 );
2431
2432 if position.column > 0 {
2433 position += Point::new(1, 0);
2434 }
2435
2436 if !self.is_line_blank(position.row) {
2437 self.edit(
2438 [(position..position, "\n")],
2439 Some(AutoindentMode::EachLine),
2440 cx,
2441 );
2442 }
2443
2444 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2445 self.edit(
2446 [(position..position, "\n")],
2447 Some(AutoindentMode::EachLine),
2448 cx,
2449 );
2450 position.row += 1;
2451 }
2452
2453 if space_below
2454 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2455 {
2456 self.edit(
2457 [(position..position, "\n")],
2458 Some(AutoindentMode::EachLine),
2459 cx,
2460 );
2461 }
2462
2463 self.end_transaction(cx);
2464
2465 position
2466 }
2467
2468 /// Applies the given remote operations to the buffer.
2469 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2470 self.pending_autoindent.take();
2471 let was_dirty = self.is_dirty();
2472 let old_version = self.version.clone();
2473 let mut deferred_ops = Vec::new();
2474 let buffer_ops = ops
2475 .into_iter()
2476 .filter_map(|op| match op {
2477 Operation::Buffer(op) => Some(op),
2478 _ => {
2479 if self.can_apply_op(&op) {
2480 self.apply_op(op, cx);
2481 } else {
2482 deferred_ops.push(op);
2483 }
2484 None
2485 }
2486 })
2487 .collect::<Vec<_>>();
2488 for operation in buffer_ops.iter() {
2489 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2490 }
2491 self.text.apply_ops(buffer_ops);
2492 self.deferred_ops.insert(deferred_ops);
2493 self.flush_deferred_ops(cx);
2494 self.did_edit(&old_version, was_dirty, cx);
2495 // Notify independently of whether the buffer was edited as the operations could include a
2496 // selection update.
2497 cx.notify();
2498 }
2499
2500 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2501 let mut deferred_ops = Vec::new();
2502 for op in self.deferred_ops.drain().iter().cloned() {
2503 if self.can_apply_op(&op) {
2504 self.apply_op(op, cx);
2505 } else {
2506 deferred_ops.push(op);
2507 }
2508 }
2509 self.deferred_ops.insert(deferred_ops);
2510 }
2511
2512 pub fn has_deferred_ops(&self) -> bool {
2513 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2514 }
2515
2516 fn can_apply_op(&self, operation: &Operation) -> bool {
2517 match operation {
2518 Operation::Buffer(_) => {
2519 unreachable!("buffer operations should never be applied at this layer")
2520 }
2521 Operation::UpdateDiagnostics {
2522 diagnostics: diagnostic_set,
2523 ..
2524 } => diagnostic_set.iter().all(|diagnostic| {
2525 self.text.can_resolve(&diagnostic.range.start)
2526 && self.text.can_resolve(&diagnostic.range.end)
2527 }),
2528 Operation::UpdateSelections { selections, .. } => selections
2529 .iter()
2530 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2531 Operation::UpdateCompletionTriggers { .. } => true,
2532 }
2533 }
2534
2535 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2536 match operation {
2537 Operation::Buffer(_) => {
2538 unreachable!("buffer operations should never be applied at this layer")
2539 }
2540 Operation::UpdateDiagnostics {
2541 server_id,
2542 diagnostics: diagnostic_set,
2543 lamport_timestamp,
2544 } => {
2545 let snapshot = self.snapshot();
2546 self.apply_diagnostic_update(
2547 server_id,
2548 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2549 lamport_timestamp,
2550 cx,
2551 );
2552 }
2553 Operation::UpdateSelections {
2554 selections,
2555 lamport_timestamp,
2556 line_mode,
2557 cursor_shape,
2558 } => {
2559 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2560 if set.lamport_timestamp > lamport_timestamp {
2561 return;
2562 }
2563 }
2564
2565 self.remote_selections.insert(
2566 lamport_timestamp.replica_id,
2567 SelectionSet {
2568 selections,
2569 lamport_timestamp,
2570 line_mode,
2571 cursor_shape,
2572 },
2573 );
2574 self.text.lamport_clock.observe(lamport_timestamp);
2575 self.non_text_state_update_count += 1;
2576 }
2577 Operation::UpdateCompletionTriggers {
2578 triggers,
2579 lamport_timestamp,
2580 server_id,
2581 } => {
2582 if triggers.is_empty() {
2583 self.completion_triggers_per_language_server
2584 .remove(&server_id);
2585 self.completion_triggers = self
2586 .completion_triggers_per_language_server
2587 .values()
2588 .flat_map(|triggers| triggers.into_iter().cloned())
2589 .collect();
2590 } else {
2591 self.completion_triggers_per_language_server
2592 .insert(server_id, triggers.iter().cloned().collect());
2593 self.completion_triggers.extend(triggers);
2594 }
2595 self.text.lamport_clock.observe(lamport_timestamp);
2596 }
2597 }
2598 }
2599
2600 fn apply_diagnostic_update(
2601 &mut self,
2602 server_id: LanguageServerId,
2603 diagnostics: DiagnosticSet,
2604 lamport_timestamp: clock::Lamport,
2605 cx: &mut Context<Self>,
2606 ) {
2607 if lamport_timestamp > self.diagnostics_timestamp {
2608 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2609 if diagnostics.is_empty() {
2610 if let Ok(ix) = ix {
2611 self.diagnostics.remove(ix);
2612 }
2613 } else {
2614 match ix {
2615 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2616 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2617 };
2618 }
2619 self.diagnostics_timestamp = lamport_timestamp;
2620 self.non_text_state_update_count += 1;
2621 self.text.lamport_clock.observe(lamport_timestamp);
2622 cx.notify();
2623 cx.emit(BufferEvent::DiagnosticsUpdated);
2624 }
2625 }
2626
2627 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2628 self.was_changed();
2629 cx.emit(BufferEvent::Operation {
2630 operation,
2631 is_local,
2632 });
2633 }
2634
2635 /// Removes the selections for a given peer.
2636 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2637 self.remote_selections.remove(&replica_id);
2638 cx.notify();
2639 }
2640
2641 /// Undoes the most recent transaction.
2642 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2643 let was_dirty = self.is_dirty();
2644 let old_version = self.version.clone();
2645
2646 if let Some((transaction_id, operation)) = self.text.undo() {
2647 self.send_operation(Operation::Buffer(operation), true, cx);
2648 self.did_edit(&old_version, was_dirty, cx);
2649 Some(transaction_id)
2650 } else {
2651 None
2652 }
2653 }
2654
2655 /// Manually undoes a specific transaction in the buffer's undo history.
2656 pub fn undo_transaction(
2657 &mut self,
2658 transaction_id: TransactionId,
2659 cx: &mut Context<Self>,
2660 ) -> bool {
2661 let was_dirty = self.is_dirty();
2662 let old_version = self.version.clone();
2663 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2664 self.send_operation(Operation::Buffer(operation), true, cx);
2665 self.did_edit(&old_version, was_dirty, cx);
2666 true
2667 } else {
2668 false
2669 }
2670 }
2671
2672 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2673 pub fn undo_to_transaction(
2674 &mut self,
2675 transaction_id: TransactionId,
2676 cx: &mut Context<Self>,
2677 ) -> bool {
2678 let was_dirty = self.is_dirty();
2679 let old_version = self.version.clone();
2680
2681 let operations = self.text.undo_to_transaction(transaction_id);
2682 let undone = !operations.is_empty();
2683 for operation in operations {
2684 self.send_operation(Operation::Buffer(operation), true, cx);
2685 }
2686 if undone {
2687 self.did_edit(&old_version, was_dirty, cx)
2688 }
2689 undone
2690 }
2691
2692 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2693 let was_dirty = self.is_dirty();
2694 let operation = self.text.undo_operations(counts);
2695 let old_version = self.version.clone();
2696 self.send_operation(Operation::Buffer(operation), true, cx);
2697 self.did_edit(&old_version, was_dirty, cx);
2698 }
2699
    /// Redoes the most recently undone transaction.
2701 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2702 let was_dirty = self.is_dirty();
2703 let old_version = self.version.clone();
2704
2705 if let Some((transaction_id, operation)) = self.text.redo() {
2706 self.send_operation(Operation::Buffer(operation), true, cx);
2707 self.did_edit(&old_version, was_dirty, cx);
2708 Some(transaction_id)
2709 } else {
2710 None
2711 }
2712 }
2713
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2715 pub fn redo_to_transaction(
2716 &mut self,
2717 transaction_id: TransactionId,
2718 cx: &mut Context<Self>,
2719 ) -> bool {
2720 let was_dirty = self.is_dirty();
2721 let old_version = self.version.clone();
2722
2723 let operations = self.text.redo_to_transaction(transaction_id);
2724 let redone = !operations.is_empty();
2725 for operation in operations {
2726 self.send_operation(Operation::Buffer(operation), true, cx);
2727 }
2728 if redone {
2729 self.did_edit(&old_version, was_dirty, cx)
2730 }
2731 redone
2732 }
2733
2734 /// Override current completion triggers with the user-provided completion triggers.
2735 pub fn set_completion_triggers(
2736 &mut self,
2737 server_id: LanguageServerId,
2738 triggers: BTreeSet<String>,
2739 cx: &mut Context<Self>,
2740 ) {
2741 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2742 if triggers.is_empty() {
2743 self.completion_triggers_per_language_server
2744 .remove(&server_id);
2745 self.completion_triggers = self
2746 .completion_triggers_per_language_server
2747 .values()
2748 .flat_map(|triggers| triggers.into_iter().cloned())
2749 .collect();
2750 } else {
2751 self.completion_triggers_per_language_server
2752 .insert(server_id, triggers.clone());
2753 self.completion_triggers.extend(triggers.iter().cloned());
2754 }
2755 self.send_operation(
2756 Operation::UpdateCompletionTriggers {
2757 triggers: triggers.into_iter().collect(),
2758 lamport_timestamp: self.completion_triggers_timestamp,
2759 server_id,
2760 },
2761 true,
2762 cx,
2763 );
2764 cx.notify();
2765 }
2766
2767 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2769 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2770 &self.completion_triggers
2771 }
2772
2773 /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `preserve_preview`
    /// to return true until there are additional edits.
2776 pub fn refresh_preview(&mut self) {
2777 self.preview_version = self.version.clone();
2778 }
2779
2780 /// Whether we should preserve the preview status of a tab containing this buffer.
2781 pub fn preserve_preview(&self) -> bool {
2782 !self.has_edits_since(&self.preview_version)
2783 }
2784}
2785
2786#[doc(hidden)]
2787#[cfg(any(test, feature = "test-support"))]
2788impl Buffer {
2789 pub fn edit_via_marked_text(
2790 &mut self,
2791 marked_string: &str,
2792 autoindent_mode: Option<AutoindentMode>,
2793 cx: &mut Context<Self>,
2794 ) {
2795 let edits = self.edits_for_marked_text(marked_string);
2796 self.edit(edits, autoindent_mode, cx);
2797 }
2798
2799 pub fn set_group_interval(&mut self, group_interval: Duration) {
2800 self.text.set_group_interval(group_interval);
2801 }
2802
2803 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2804 where
2805 T: rand::Rng,
2806 {
2807 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2808 let mut last_end = None;
2809 for _ in 0..old_range_count {
2810 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2811 break;
2812 }
2813
2814 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2815 let mut range = self.random_byte_range(new_start, rng);
2816 if rng.gen_bool(0.2) {
2817 mem::swap(&mut range.start, &mut range.end);
2818 }
2819 last_end = Some(range.end);
2820
2821 let new_text_len = rng.gen_range(0..10);
2822 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2823 new_text = new_text.to_uppercase();
2824
2825 edits.push((range, new_text));
2826 }
2827 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2828 self.edit(edits, None, cx);
2829 }
2830
2831 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834
2835 let ops = self.text.randomly_undo_redo(rng);
2836 if !ops.is_empty() {
2837 for op in ops {
2838 self.send_operation(Operation::Buffer(op), true, cx);
2839 self.did_edit(&old_version, was_dirty, cx);
2840 }
2841 }
2842 }
2843}
2844
2845impl EventEmitter<BufferEvent> for Buffer {}
2846
2847impl Deref for Buffer {
2848 type Target = TextBuffer;
2849
2850 fn deref(&self) -> &Self::Target {
2851 &self.text
2852 }
2853}
2854
2855impl BufferSnapshot {
2856 /// Returns [`IndentSize`] for a given line that respects user settings and
2857 /// language preferences.
2858 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2859 indent_size_for_line(self, row)
2860 }
2861
2862 /// Returns [`IndentSize`] for a given position that respects user settings
2863 /// and language preferences.
2864 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2865 let settings = language_settings(
2866 self.language_at(position).map(|l| l.name()),
2867 self.file(),
2868 cx,
2869 );
2870 if settings.hard_tabs {
2871 IndentSize::tab()
2872 } else {
2873 IndentSize::spaces(settings.tab_size.get())
2874 }
2875 }
2876
2877 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2878 /// is passed in as `single_indent_size`.
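    ///
    /// An illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 1 through 3, using four spaces as one indent level.
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// ```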
2879 pub fn suggested_indents(
2880 &self,
2881 rows: impl Iterator<Item = u32>,
2882 single_indent_size: IndentSize,
2883 ) -> BTreeMap<u32, IndentSize> {
2884 let mut result = BTreeMap::new();
2885
2886 for row_range in contiguous_ranges(rows, 10) {
2887 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2888 Some(suggestions) => suggestions,
2889 _ => break,
2890 };
2891
2892 for (row, suggestion) in row_range.zip(suggestions) {
2893 let indent_size = if let Some(suggestion) = suggestion {
2894 result
2895 .get(&suggestion.basis_row)
2896 .copied()
2897 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2898 .with_delta(suggestion.delta, single_indent_size)
2899 } else {
2900 self.indent_size_for_line(row)
2901 };
2902
2903 result.insert(row, indent_size);
2904 }
2905 }
2906
2907 result
2908 }
2909
2910 fn suggest_autoindents(
2911 &self,
2912 row_range: Range<u32>,
2913 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2914 let config = &self.language.as_ref()?.config;
2915 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2916
2917 #[derive(Debug, Clone)]
2918 struct StartPosition {
2919 start: Point,
2920 suffix: SharedString,
2921 }
2922
2923 // Find the suggested indentation ranges based on the syntax tree.
2924 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2925 let end = Point::new(row_range.end, 0);
2926 let range = (start..end).to_offset(&self.text);
2927 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2928 Some(&grammar.indents_config.as_ref()?.query)
2929 });
2930 let indent_configs = matches
2931 .grammars()
2932 .iter()
2933 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2934 .collect::<Vec<_>>();
2935
2936 let mut indent_ranges = Vec::<Range<Point>>::new();
2937 let mut start_positions = Vec::<StartPosition>::new();
2938 let mut outdent_positions = Vec::<Point>::new();
2939 while let Some(mat) = matches.peek() {
2940 let mut start: Option<Point> = None;
2941 let mut end: Option<Point> = None;
2942
2943 let config = indent_configs[mat.grammar_index];
2944 for capture in mat.captures {
2945 if capture.index == config.indent_capture_ix {
2946 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2947 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2948 } else if Some(capture.index) == config.start_capture_ix {
2949 start = Some(Point::from_ts_point(capture.node.end_position()));
2950 } else if Some(capture.index) == config.end_capture_ix {
2951 end = Some(Point::from_ts_point(capture.node.start_position()));
2952 } else if Some(capture.index) == config.outdent_capture_ix {
2953 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2954 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2955 start_positions.push(StartPosition {
2956 start: Point::from_ts_point(capture.node.start_position()),
2957 suffix: suffix.clone(),
2958 });
2959 }
2960 }
2961
2962 matches.advance();
2963 if let Some((start, end)) = start.zip(end) {
2964 if start.row == end.row {
2965 continue;
2966 }
2967 let range = start..end;
2968 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2969 Err(ix) => indent_ranges.insert(ix, range),
2970 Ok(ix) => {
2971 let prev_range = &mut indent_ranges[ix];
2972 prev_range.end = prev_range.end.max(range.end);
2973 }
2974 }
2975 }
2976 }
2977
2978 let mut error_ranges = Vec::<Range<Point>>::new();
2979 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2980 grammar.error_query.as_ref()
2981 });
2982 while let Some(mat) = matches.peek() {
2983 let node = mat.captures[0].node;
2984 let start = Point::from_ts_point(node.start_position());
2985 let end = Point::from_ts_point(node.end_position());
2986 let range = start..end;
2987 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2988 Ok(ix) | Err(ix) => ix,
2989 };
2990 let mut end_ix = ix;
2991 while let Some(existing_range) = error_ranges.get(end_ix) {
2992 if existing_range.end < end {
2993 end_ix += 1;
2994 } else {
2995 break;
2996 }
2997 }
2998 error_ranges.splice(ix..end_ix, [range]);
2999 matches.advance();
3000 }
3001
3002 outdent_positions.sort();
3003 for outdent_position in outdent_positions {
3004 // find the innermost indent range containing this outdent_position
3005 // set its end to the outdent position
3006 if let Some(range_to_truncate) = indent_ranges
3007 .iter_mut()
3008 .filter(|indent_range| indent_range.contains(&outdent_position))
3009 .next_back()
3010 {
3011 range_to_truncate.end = outdent_position;
3012 }
3013 }
3014
3015 start_positions.sort_by_key(|b| b.start);
3016
        // Find the suggested indentation increases and decreases based on regexes.
3018 let mut regex_outdent_map = HashMap::default();
3019 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3020 let mut start_positions_iter = start_positions.iter().peekable();
3021
3022 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3023 self.for_each_line(
3024 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3025 ..Point::new(row_range.end, 0),
3026 |row, line| {
3027 if config
3028 .decrease_indent_pattern
3029 .as_ref()
3030 .map_or(false, |regex| regex.is_match(line))
3031 {
3032 indent_change_rows.push((row, Ordering::Less));
3033 }
3034 if config
3035 .increase_indent_pattern
3036 .as_ref()
3037 .map_or(false, |regex| regex.is_match(line))
3038 {
3039 indent_change_rows.push((row + 1, Ordering::Greater));
3040 }
3041 while let Some(pos) = start_positions_iter.peek() {
3042 if pos.start.row < row {
3043 let pos = start_positions_iter.next().unwrap();
3044 last_seen_suffix
3045 .entry(pos.suffix.to_string())
3046 .or_default()
3047 .push(pos.start);
3048 } else {
3049 break;
3050 }
3051 }
3052 for rule in &config.decrease_indent_patterns {
3053 if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) {
3054 let row_start_column = self.indent_size_for_line(row).len;
3055 let basis_row = rule
3056 .valid_after
3057 .iter()
3058 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3059 .flatten()
3060 .filter(|start_point| start_point.column <= row_start_column)
3061 .max_by_key(|start_point| start_point.row);
3062 if let Some(outdent_to_row) = basis_row {
3063 regex_outdent_map.insert(row, outdent_to_row.row);
3064 }
3065 break;
3066 }
3067 }
3068 },
3069 );
3070
3071 let mut indent_changes = indent_change_rows.into_iter().peekable();
3072 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3073 prev_non_blank_row.unwrap_or(0)
3074 } else {
3075 row_range.start.saturating_sub(1)
3076 };
3077
3078 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3079 Some(row_range.map(move |row| {
3080 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3081
3082 let mut indent_from_prev_row = false;
3083 let mut outdent_from_prev_row = false;
3084 let mut outdent_to_row = u32::MAX;
3085 let mut from_regex = false;
3086
3087 while let Some((indent_row, delta)) = indent_changes.peek() {
3088 match indent_row.cmp(&row) {
3089 Ordering::Equal => match delta {
3090 Ordering::Less => {
3091 from_regex = true;
3092 outdent_from_prev_row = true
3093 }
3094 Ordering::Greater => {
3095 indent_from_prev_row = true;
3096 from_regex = true
3097 }
3098 _ => {}
3099 },
3100
3101 Ordering::Greater => break,
3102 Ordering::Less => {}
3103 }
3104
3105 indent_changes.next();
3106 }
3107
3108 for range in &indent_ranges {
3109 if range.start.row >= row {
3110 break;
3111 }
3112 if range.start.row == prev_row && range.end > row_start {
3113 indent_from_prev_row = true;
3114 }
3115 if range.end > prev_row_start && range.end <= row_start {
3116 outdent_to_row = outdent_to_row.min(range.start.row);
3117 }
3118 }
3119
3120 if let Some(basis_row) = regex_outdent_map.get(&row) {
3121 indent_from_prev_row = false;
3122 outdent_to_row = *basis_row;
3123 from_regex = true;
3124 }
3125
3126 let within_error = error_ranges
3127 .iter()
3128 .any(|e| e.start.row < row && e.end > row_start);
3129
3130 let suggestion = if outdent_to_row == prev_row
3131 || (outdent_from_prev_row && indent_from_prev_row)
3132 {
3133 Some(IndentSuggestion {
3134 basis_row: prev_row,
3135 delta: Ordering::Equal,
3136 within_error: within_error && !from_regex,
3137 })
3138 } else if indent_from_prev_row {
3139 Some(IndentSuggestion {
3140 basis_row: prev_row,
3141 delta: Ordering::Greater,
3142 within_error: within_error && !from_regex,
3143 })
3144 } else if outdent_to_row < prev_row {
3145 Some(IndentSuggestion {
3146 basis_row: outdent_to_row,
3147 delta: Ordering::Equal,
3148 within_error: within_error && !from_regex,
3149 })
3150 } else if outdent_from_prev_row {
3151 Some(IndentSuggestion {
3152 basis_row: prev_row,
3153 delta: Ordering::Less,
3154 within_error: within_error && !from_regex,
3155 })
3156 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3157 {
3158 Some(IndentSuggestion {
3159 basis_row: prev_row,
3160 delta: Ordering::Equal,
3161 within_error: within_error && !from_regex,
3162 })
3163 } else {
3164 None
3165 };
3166
3167 prev_row = row;
3168 prev_row_start = row_start;
3169 suggestion
3170 }))
3171 }
3172
3173 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3174 while row > 0 {
3175 row -= 1;
3176 if !self.is_line_blank(row) {
3177 return Some(row);
3178 }
3179 }
3180 None
3181 }
3182
3183 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3184 let captures = self.syntax.captures(range, &self.text, |grammar| {
3185 grammar.highlights_query.as_ref()
3186 });
3187 let highlight_maps = captures
3188 .grammars()
3189 .iter()
3190 .map(|grammar| grammar.highlight_map())
3191 .collect();
3192 (captures, highlight_maps)
3193 }
3194
3195 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3196 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3197 /// returned in chunks where each chunk has a single syntax highlighting style and
3198 /// diagnostic status.
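    ///
    /// An illustrative sketch (not compiled as a doctest), assuming each yielded chunk
    /// exposes its text via a `text` field:
    ///
    /// ```ignore
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```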
3199 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3200 let range = range.start.to_offset(self)..range.end.to_offset(self);
3201
3202 let mut syntax = None;
3203 if language_aware {
3204 syntax = Some(self.get_highlights(range.clone()));
3205 }
3206 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3207 let diagnostics = language_aware;
3208 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3209 }
3210
3211 pub fn highlighted_text_for_range<T: ToOffset>(
3212 &self,
3213 range: Range<T>,
3214 override_style: Option<HighlightStyle>,
3215 syntax_theme: &SyntaxTheme,
3216 ) -> HighlightedText {
3217 HighlightedText::from_buffer_range(
3218 range,
3219 &self.text,
3220 &self.syntax,
3221 override_style,
3222 syntax_theme,
3223 )
3224 }
3225
3226 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3228 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3229 let mut line = String::new();
3230 let mut row = range.start.row;
3231 for chunk in self
3232 .as_rope()
3233 .chunks_in_range(range.to_offset(self))
3234 .chain(["\n"])
3235 {
3236 for (newline_ix, text) in chunk.split('\n').enumerate() {
3237 if newline_ix > 0 {
3238 callback(row, &line);
3239 row += 1;
3240 line.clear();
3241 }
3242 line.push_str(text);
3243 }
3244 }
3245 }
3246
3247 /// Iterates over every [`SyntaxLayer`] in the buffer.
3248 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3249 self.syntax
3250 .layers_for_range(0..self.len(), &self.text, true)
3251 }
3252
3253 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3254 let offset = position.to_offset(self);
3255 self.syntax
3256 .layers_for_range(offset..offset, &self.text, false)
3257 .filter(|l| l.node().end_byte() > offset)
3258 .last()
3259 }
3260
3261 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3262 &self,
3263 range: Range<D>,
3264 ) -> Option<SyntaxLayer<'_>> {
3265 let range = range.to_offset(self);
3266 return self
3267 .syntax
3268 .layers_for_range(range, &self.text, false)
3269 .max_by(|a, b| {
3270 if a.depth != b.depth {
3271 a.depth.cmp(&b.depth)
3272 } else if a.offset.0 != b.offset.0 {
3273 a.offset.0.cmp(&b.offset.0)
3274 } else {
3275 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3276 }
3277 });
3278 }
3279
3280 /// Returns the main [`Language`].
3281 pub fn language(&self) -> Option<&Arc<Language>> {
3282 self.language.as_ref()
3283 }
3284
3285 /// Returns the [`Language`] at the given location.
3286 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3287 self.syntax_layer_at(position)
3288 .map(|info| info.language)
3289 .or(self.language.as_ref())
3290 }
3291
3292 /// Returns the settings for the language at the given location.
3293 pub fn settings_at<'a, D: ToOffset>(
3294 &'a self,
3295 position: D,
3296 cx: &'a App,
3297 ) -> Cow<'a, LanguageSettings> {
3298 language_settings(
3299 self.language_at(position).map(|l| l.name()),
3300 self.file.as_ref(),
3301 cx,
3302 )
3303 }
3304
3305 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3306 CharClassifier::new(self.language_scope_at(point))
3307 }
3308
3309 /// Returns the [`LanguageScope`] at the given location.
3310 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3311 let offset = position.to_offset(self);
3312 let mut scope = None;
3313 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3314
3315 // Use the layer that has the smallest node intersecting the given point.
3316 for layer in self
3317 .syntax
3318 .layers_for_range(offset..offset, &self.text, false)
3319 {
3320 let mut cursor = layer.node().walk();
3321
3322 let mut range = None;
3323 loop {
3324 let child_range = cursor.node().byte_range();
3325 if !child_range.contains(&offset) {
3326 break;
3327 }
3328
3329 range = Some(child_range);
3330 if cursor.goto_first_child_for_byte(offset).is_none() {
3331 break;
3332 }
3333 }
3334
3335 if let Some(range) = range {
3336 if smallest_range_and_depth.as_ref().map_or(
3337 true,
3338 |(smallest_range, smallest_range_depth)| {
3339 if layer.depth > *smallest_range_depth {
3340 true
3341 } else if layer.depth == *smallest_range_depth {
3342 range.len() < smallest_range.len()
3343 } else {
3344 false
3345 }
3346 },
3347 ) {
3348 smallest_range_and_depth = Some((range, layer.depth));
3349 scope = Some(LanguageScope {
3350 language: layer.language.clone(),
3351 override_id: layer.override_id(offset, &self.text),
3352 });
3353 }
3354 }
3355 }
3356
3357 scope.or_else(|| {
3358 self.language.clone().map(|language| LanguageScope {
3359 language,
3360 override_id: None,
3361 })
3362 })
3363 }
3364
3365 /// Returns a tuple of the range and character kind of the word
3366 /// surrounding the given position.
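    ///
    /// An illustrative sketch (not compiled as a doctest), assuming the buffer contains
    /// `"hello world"`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// assert_eq!(range, 0..5); // the byte range of "hello"
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```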
3367 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3368 let mut start = start.to_offset(self);
3369 let mut end = start;
3370 let mut next_chars = self.chars_at(start).take(128).peekable();
3371 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3372
3373 let classifier = self.char_classifier_at(start);
3374 let word_kind = cmp::max(
3375 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3376 next_chars.peek().copied().map(|c| classifier.kind(c)),
3377 );
3378
3379 for ch in prev_chars {
3380 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3381 start -= ch.len_utf8();
3382 } else {
3383 break;
3384 }
3385 }
3386
3387 for ch in next_chars {
3388 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3389 end += ch.len_utf8();
3390 } else {
3391 break;
3392 }
3393 }
3394
3395 (start..end, word_kind)
3396 }
3397
3398 /// Returns the closest syntax node enclosing the given range.
3399 pub fn syntax_ancestor<'a, T: ToOffset>(
3400 &'a self,
3401 range: Range<T>,
3402 ) -> Option<tree_sitter::Node<'a>> {
3403 let range = range.start.to_offset(self)..range.end.to_offset(self);
3404 let mut result: Option<tree_sitter::Node<'a>> = None;
3405 'outer: for layer in self
3406 .syntax
3407 .layers_for_range(range.clone(), &self.text, true)
3408 {
3409 let mut cursor = layer.node().walk();
3410
3411 // Descend to the first leaf that touches the start of the range.
3412 //
3413 // If the range is non-empty and the current node ends exactly at the start,
3414 // move to the next sibling to find a node that extends beyond the start.
3415 //
3416 // If the range is empty and the current node starts after the range position,
3417 // move to the previous sibling to find the node that contains the position.
3418 while cursor.goto_first_child_for_byte(range.start).is_some() {
3419 if !range.is_empty() && cursor.node().end_byte() == range.start {
3420 cursor.goto_next_sibling();
3421 }
3422 if range.is_empty() && cursor.node().start_byte() > range.start {
3423 cursor.goto_previous_sibling();
3424 }
3425 }
3426
3427 // Ascend to the smallest ancestor that strictly contains the range.
3428 loop {
3429 let node_range = cursor.node().byte_range();
3430 if node_range.start <= range.start
3431 && node_range.end >= range.end
3432 && node_range.len() > range.len()
3433 {
3434 break;
3435 }
3436 if !cursor.goto_parent() {
3437 continue 'outer;
3438 }
3439 }
3440
3441 let left_node = cursor.node();
3442 let mut layer_result = left_node;
3443
3444 // For an empty range, try to find another node immediately to the right of the range.
3445 if left_node.end_byte() == range.start {
3446 let mut right_node = None;
3447 while !cursor.goto_next_sibling() {
3448 if !cursor.goto_parent() {
3449 break;
3450 }
3451 }
3452
3453 while cursor.node().start_byte() == range.start {
3454 right_node = Some(cursor.node());
3455 if !cursor.goto_first_child() {
3456 break;
3457 }
3458 }
3459
3460 // If there is a candidate node on both sides of the (empty) range, then
3461 // decide between the two by favoring a named node over an anonymous token.
3462 // If both nodes are the same in that regard, favor the right one.
3463 if let Some(right_node) = right_node {
3464 if right_node.is_named() || !left_node.is_named() {
3465 layer_result = right_node;
3466 }
3467 }
3468 }
3469
3470 if let Some(previous_result) = &result {
3471 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3472 continue;
3473 }
3474 }
3475 result = Some(layer_result);
3476 }
3477
3478 result
3479 }
3480
3481 /// Returns the root syntax node within the given row
3482 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3483 let start_offset = position.to_offset(self);
3484
3485 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3486
3487 let layer = self
3488 .syntax
3489 .layers_for_range(start_offset..start_offset, &self.text, true)
3490 .next()?;
3491
3492 let mut cursor = layer.node().walk();
3493
3494 // Descend to the first leaf that touches the start of the range.
3495 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3496 if cursor.node().end_byte() == start_offset {
3497 cursor.goto_next_sibling();
3498 }
3499 }
3500
3501 // Ascend to the root node within the same row.
3502 while cursor.goto_parent() {
3503 if cursor.node().start_position().row != row {
3504 break;
3505 }
3506 }
3507
        Some(cursor.node())
3509 }
3510
3511 /// Returns the outline for the buffer.
3512 ///
3513 /// This method allows passing an optional [`SyntaxTheme`] to
3514 /// syntax-highlight the returned symbols.
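    ///
    /// A hedged sketch of typical use (assumes the `items` field of [`Outline`] is accessible and a `snapshot` is in scope; not run as a doctest):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```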
3515 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3516 self.outline_items_containing(0..self.len(), true, theme)
3517 .map(Outline::new)
3518 }
3519
3520 /// Returns all the symbols that contain the given position.
3521 ///
3522 /// This method allows passing an optional [`SyntaxTheme`] to
3523 /// syntax-highlight the returned symbols.
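    ///
    /// A minimal sketch (hedged: assumes a `snapshot` and a byte `offset` inside a method body; not run as a doctest):
    ///
    /// ```ignore
    /// // Yields the chain of containing symbols, e.g. module, impl block, then method.
    /// if let Some(symbols) = snapshot.symbols_containing(offset, None) {
    ///     let path = symbols
    ///         .iter()
    ///         .map(|symbol| symbol.text.as_str())
    ///         .collect::<Vec<_>>()
    ///         .join(" > ");
    ///     println!("{path}");
    /// }
    /// ```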
3524 pub fn symbols_containing<T: ToOffset>(
3525 &self,
3526 position: T,
3527 theme: Option<&SyntaxTheme>,
3528 ) -> Option<Vec<OutlineItem<Anchor>>> {
3529 let position = position.to_offset(self);
3530 let mut items = self.outline_items_containing(
3531 position.saturating_sub(1)..self.len().min(position + 1),
3532 false,
3533 theme,
3534 )?;
3535 let mut prev_depth = None;
3536 items.retain(|item| {
3537 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3538 prev_depth = Some(item.depth);
3539 result
3540 });
3541 Some(items)
3542 }
3543
3544 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3545 let range = range.to_offset(self);
3546 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3547 grammar.outline_config.as_ref().map(|c| &c.query)
3548 });
3549 let configs = matches
3550 .grammars()
3551 .iter()
3552 .map(|g| g.outline_config.as_ref().unwrap())
3553 .collect::<Vec<_>>();
3554
3555 while let Some(mat) = matches.peek() {
3556 let config = &configs[mat.grammar_index];
3557 let containing_item_node = maybe!({
3558 let item_node = mat.captures.iter().find_map(|cap| {
3559 if cap.index == config.item_capture_ix {
3560 Some(cap.node)
3561 } else {
3562 None
3563 }
3564 })?;
3565
3566 let item_byte_range = item_node.byte_range();
3567 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3568 None
3569 } else {
3570 Some(item_node)
3571 }
3572 });
3573
3574 if let Some(item_node) = containing_item_node {
3575 return Some(
3576 Point::from_ts_point(item_node.start_position())
3577 ..Point::from_ts_point(item_node.end_position()),
3578 );
3579 }
3580
3581 matches.advance();
3582 }
3583 None
3584 }
3585
3586 pub fn outline_items_containing<T: ToOffset>(
3587 &self,
3588 range: Range<T>,
3589 include_extra_context: bool,
3590 theme: Option<&SyntaxTheme>,
3591 ) -> Option<Vec<OutlineItem<Anchor>>> {
3592 let range = range.to_offset(self);
3593 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3594 grammar.outline_config.as_ref().map(|c| &c.query)
3595 });
3596 let configs = matches
3597 .grammars()
3598 .iter()
3599 .map(|g| g.outline_config.as_ref().unwrap())
3600 .collect::<Vec<_>>();
3601
3602 let mut items = Vec::new();
3603 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3604 while let Some(mat) = matches.peek() {
3605 let config = &configs[mat.grammar_index];
3606 if let Some(item) =
3607 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3608 {
3609 items.push(item);
3610 } else if let Some(capture) = mat
3611 .captures
3612 .iter()
3613 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3614 {
3615 let capture_range = capture.node.start_position()..capture.node.end_position();
3616 let mut capture_row_range =
3617 capture_range.start.row as u32..capture_range.end.row as u32;
3618 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3619 {
3620 capture_row_range.end -= 1;
3621 }
3622 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3623 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3624 last_row_range.end = capture_row_range.end;
3625 } else {
3626 annotation_row_ranges.push(capture_row_range);
3627 }
3628 } else {
3629 annotation_row_ranges.push(capture_row_range);
3630 }
3631 }
3632 matches.advance();
3633 }
3634
3635 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3636
3637 // Assign depths based on containment relationships and convert to anchors.
3638 let mut item_ends_stack = Vec::<Point>::new();
3639 let mut anchor_items = Vec::new();
3640 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3641 for item in items {
3642 while let Some(last_end) = item_ends_stack.last().copied() {
3643 if last_end < item.range.end {
3644 item_ends_stack.pop();
3645 } else {
3646 break;
3647 }
3648 }
3649
3650 let mut annotation_row_range = None;
3651 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3652 let row_preceding_item = item.range.start.row.saturating_sub(1);
3653 if next_annotation_row_range.end < row_preceding_item {
3654 annotation_row_ranges.next();
3655 } else {
3656 if next_annotation_row_range.end == row_preceding_item {
3657 annotation_row_range = Some(next_annotation_row_range.clone());
3658 annotation_row_ranges.next();
3659 }
3660 break;
3661 }
3662 }
3663
3664 anchor_items.push(OutlineItem {
3665 depth: item_ends_stack.len(),
3666 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3667 text: item.text,
3668 highlight_ranges: item.highlight_ranges,
3669 name_ranges: item.name_ranges,
3670 body_range: item.body_range.map(|body_range| {
3671 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3672 }),
3673 annotation_range: annotation_row_range.map(|annotation_range| {
3674 self.anchor_after(Point::new(annotation_range.start, 0))
3675 ..self.anchor_before(Point::new(
3676 annotation_range.end,
3677 self.line_len(annotation_range.end),
3678 ))
3679 }),
3680 });
3681 item_ends_stack.push(item.range.end);
3682 }
3683
3684 Some(anchor_items)
3685 }
3686
3687 fn next_outline_item(
3688 &self,
3689 config: &OutlineConfig,
3690 mat: &SyntaxMapMatch,
3691 range: &Range<usize>,
3692 include_extra_context: bool,
3693 theme: Option<&SyntaxTheme>,
3694 ) -> Option<OutlineItem<Point>> {
3695 let item_node = mat.captures.iter().find_map(|cap| {
3696 if cap.index == config.item_capture_ix {
3697 Some(cap.node)
3698 } else {
3699 None
3700 }
3701 })?;
3702
3703 let item_byte_range = item_node.byte_range();
3704 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3705 return None;
3706 }
3707 let item_point_range = Point::from_ts_point(item_node.start_position())
3708 ..Point::from_ts_point(item_node.end_position());
3709
3710 let mut open_point = None;
3711 let mut close_point = None;
3712 let mut buffer_ranges = Vec::new();
3713 for capture in mat.captures {
3714 let node_is_name;
3715 if capture.index == config.name_capture_ix {
3716 node_is_name = true;
3717 } else if Some(capture.index) == config.context_capture_ix
3718 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3719 {
3720 node_is_name = false;
3721 } else {
3722 if Some(capture.index) == config.open_capture_ix {
3723 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3724 } else if Some(capture.index) == config.close_capture_ix {
3725 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3726 }
3727
3728 continue;
3729 }
3730
3731 let mut range = capture.node.start_byte()..capture.node.end_byte();
3732 let start = capture.node.start_position();
3733 if capture.node.end_position().row > start.row {
3734 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3735 }
3736
3737 if !range.is_empty() {
3738 buffer_ranges.push((range, node_is_name));
3739 }
3740 }
3741 if buffer_ranges.is_empty() {
3742 return None;
3743 }
3744 let mut text = String::new();
3745 let mut highlight_ranges = Vec::new();
3746 let mut name_ranges = Vec::new();
3747 let mut chunks = self.chunks(
3748 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3749 true,
3750 );
3751 let mut last_buffer_range_end = 0;
3752
3753 for (buffer_range, is_name) in buffer_ranges {
3754 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3755 if space_added {
3756 text.push(' ');
3757 }
3758 let before_append_len = text.len();
3759 let mut offset = buffer_range.start;
3760 chunks.seek(buffer_range.clone());
3761 for mut chunk in chunks.by_ref() {
3762 if chunk.text.len() > buffer_range.end - offset {
3763 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3764 offset = buffer_range.end;
3765 } else {
3766 offset += chunk.text.len();
3767 }
3768 let style = chunk
3769 .syntax_highlight_id
3770 .zip(theme)
3771 .and_then(|(highlight, theme)| highlight.style(theme));
3772 if let Some(style) = style {
3773 let start = text.len();
3774 let end = start + chunk.text.len();
3775 highlight_ranges.push((start..end, style));
3776 }
3777 text.push_str(chunk.text);
3778 if offset >= buffer_range.end {
3779 break;
3780 }
3781 }
3782 if is_name {
3783 let after_append_len = text.len();
3784 let start = if space_added && !name_ranges.is_empty() {
3785 before_append_len - 1
3786 } else {
3787 before_append_len
3788 };
3789 name_ranges.push(start..after_append_len);
3790 }
3791 last_buffer_range_end = buffer_range.end;
3792 }
3793
3794 Some(OutlineItem {
3795 depth: 0, // We'll calculate the depth later
3796 range: item_point_range,
3797 text,
3798 highlight_ranges,
3799 name_ranges,
3800 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3801 annotation_range: None,
3802 })
3803 }
3804
3805 pub fn function_body_fold_ranges<T: ToOffset>(
3806 &self,
3807 within: Range<T>,
3808 ) -> impl Iterator<Item = Range<usize>> + '_ {
3809 self.text_object_ranges(within, TreeSitterOptions::default())
3810 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3811 }
3812
3813 /// For each grammar in the language, runs the provided
3814 /// [`tree_sitter::Query`] against the given range.
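    ///
    /// A minimal sketch, mirroring how this is called elsewhere in this file (not run as a doctest):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     println!("pattern {} of grammar {}", mat.pattern_index, mat.grammar_index);
    ///     matches.advance();
    /// }
    /// ```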
3815 pub fn matches(
3816 &self,
3817 range: Range<usize>,
3818 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3819 ) -> SyntaxMapMatches<'_> {
3820 self.syntax.matches(range, self, query)
3821 }
3822
3823 pub fn all_bracket_ranges(
3824 &self,
3825 range: Range<usize>,
3826 ) -> impl Iterator<Item = BracketMatch> + '_ {
3827 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3828 grammar.brackets_config.as_ref().map(|c| &c.query)
3829 });
3830 let configs = matches
3831 .grammars()
3832 .iter()
3833 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3834 .collect::<Vec<_>>();
3835
3836 iter::from_fn(move || {
3837 while let Some(mat) = matches.peek() {
3838 let mut open = None;
3839 let mut close = None;
3840 let config = &configs[mat.grammar_index];
3841 let pattern = &config.patterns[mat.pattern_index];
3842 for capture in mat.captures {
3843 if capture.index == config.open_capture_ix {
3844 open = Some(capture.node.byte_range());
3845 } else if capture.index == config.close_capture_ix {
3846 close = Some(capture.node.byte_range());
3847 }
3848 }
3849
3850 matches.advance();
3851
3852 let Some((open_range, close_range)) = open.zip(close) else {
3853 continue;
3854 };
3855
3856 let bracket_range = open_range.start..=close_range.end;
3857 if !bracket_range.overlaps(&range) {
3858 continue;
3859 }
3860
3861 return Some(BracketMatch {
3862 open_range,
3863 close_range,
3864 newline_only: pattern.newline_only,
3865 });
3866 }
3867 None
3868 })
3869 }
3870
    /// Returns bracket range pairs overlapping or adjacent to `range`.
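    ///
    /// A minimal sketch (hedged: assumes a `snapshot: BufferSnapshot` over text containing `(1 + 2)`; marked `ignore` so it is not run as a doctest):
    ///
    /// ```ignore
    /// // A cursor between the parentheses reports the surrounding `(`..`)` pair.
    /// let offset = snapshot.text().find('(').unwrap() + 1;
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     println!("open {:?}, close {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```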
3872 pub fn bracket_ranges<T: ToOffset>(
3873 &self,
3874 range: Range<T>,
3875 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the search range by one character on each side so that bracket
        // pairs adjacent to `range` are also included.
        let range = range.start.to_offset(self).saturating_sub(1)
            ..self.len().min(range.end.to_offset(self) + 1);
3879 self.all_bracket_ranges(range)
3880 .filter(|pair| !pair.newline_only)
3881 }
3882
3883 pub fn debug_variables_query<T: ToOffset>(
3884 &self,
3885 range: Range<T>,
3886 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3887 let range = range.start.to_offset(self).saturating_sub(1)
3888 ..self.len().min(range.end.to_offset(self) + 1);
3889
3890 let mut matches = self.syntax.matches_with_options(
3891 range.clone(),
3892 &self.text,
3893 TreeSitterOptions::default(),
3894 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3895 );
3896
3897 let configs = matches
3898 .grammars()
3899 .iter()
3900 .map(|grammar| grammar.debug_variables_config.as_ref())
3901 .collect::<Vec<_>>();
3902
3903 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3904
3905 iter::from_fn(move || {
3906 loop {
3907 while let Some(capture) = captures.pop() {
3908 if capture.0.overlaps(&range) {
3909 return Some(capture);
3910 }
3911 }
3912
3913 let mat = matches.peek()?;
3914
3915 let Some(config) = configs[mat.grammar_index].as_ref() else {
3916 matches.advance();
3917 continue;
3918 };
3919
3920 for capture in mat.captures {
3921 let Some(ix) = config
3922 .objects_by_capture_ix
3923 .binary_search_by_key(&capture.index, |e| e.0)
3924 .ok()
3925 else {
3926 continue;
3927 };
3928 let text_object = config.objects_by_capture_ix[ix].1;
3929 let byte_range = capture.node.byte_range();
3930
3931 let mut found = false;
3932 for (range, existing) in captures.iter_mut() {
3933 if existing == &text_object {
3934 range.start = range.start.min(byte_range.start);
3935 range.end = range.end.max(byte_range.end);
3936 found = true;
3937 break;
3938 }
3939 }
3940
3941 if !found {
3942 captures.push((byte_range, text_object));
3943 }
3944 }
3945
3946 matches.advance();
3947 }
3948 })
3949 }
3950
3951 pub fn text_object_ranges<T: ToOffset>(
3952 &self,
3953 range: Range<T>,
3954 options: TreeSitterOptions,
3955 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3956 let range = range.start.to_offset(self).saturating_sub(1)
3957 ..self.len().min(range.end.to_offset(self) + 1);
3958
3959 let mut matches =
3960 self.syntax
3961 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3962 grammar.text_object_config.as_ref().map(|c| &c.query)
3963 });
3964
3965 let configs = matches
3966 .grammars()
3967 .iter()
3968 .map(|grammar| grammar.text_object_config.as_ref())
3969 .collect::<Vec<_>>();
3970
3971 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3972
3973 iter::from_fn(move || {
3974 loop {
3975 while let Some(capture) = captures.pop() {
3976 if capture.0.overlaps(&range) {
3977 return Some(capture);
3978 }
3979 }
3980
3981 let mat = matches.peek()?;
3982
3983 let Some(config) = configs[mat.grammar_index].as_ref() else {
3984 matches.advance();
3985 continue;
3986 };
3987
3988 for capture in mat.captures {
3989 let Some(ix) = config
3990 .text_objects_by_capture_ix
3991 .binary_search_by_key(&capture.index, |e| e.0)
3992 .ok()
3993 else {
3994 continue;
3995 };
3996 let text_object = config.text_objects_by_capture_ix[ix].1;
3997 let byte_range = capture.node.byte_range();
3998
3999 let mut found = false;
4000 for (range, existing) in captures.iter_mut() {
4001 if existing == &text_object {
4002 range.start = range.start.min(byte_range.start);
4003 range.end = range.end.max(byte_range.end);
4004 found = true;
4005 break;
4006 }
4007 }
4008
4009 if !found {
4010 captures.push((byte_range, text_object));
4011 }
4012 }
4013
4014 matches.advance();
4015 }
4016 })
4017 }
4018
    /// Returns enclosing bracket ranges containing the given range.
4020 pub fn enclosing_bracket_ranges<T: ToOffset>(
4021 &self,
4022 range: Range<T>,
4023 ) -> impl Iterator<Item = BracketMatch> + '_ {
4024 let range = range.start.to_offset(self)..range.end.to_offset(self);
4025
4026 self.bracket_ranges(range.clone()).filter(move |pair| {
4027 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4028 })
4029 }
4030
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to exclude bracket ranges from consideration.
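    ///
    /// A hedged sketch (assumes a buffer containing nested brackets such as `{ (x) }`; not run as a doctest):
    ///
    /// ```ignore
    /// // With the cursor on `x`, the innermost pair is the parentheses, not the braces.
    /// let offset = snapshot.text().find('x').unwrap();
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     println!("innermost pair: {open:?} and {close:?}");
    /// }
    /// ```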
4034 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4035 &self,
4036 range: Range<T>,
4037 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4038 ) -> Option<(Range<usize>, Range<usize>)> {
4039 let range = range.start.to_offset(self)..range.end.to_offset(self);
4040
4041 // Get the ranges of the innermost pair of brackets.
4042 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4043
4044 for pair in self.enclosing_bracket_ranges(range.clone()) {
4045 if let Some(range_filter) = range_filter {
4046 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
4047 continue;
4048 }
4049 }
4050
4051 let len = pair.close_range.end - pair.open_range.start;
4052
4053 if let Some((existing_open, existing_close)) = &result {
4054 let existing_len = existing_close.end - existing_open.start;
4055 if len > existing_len {
4056 continue;
4057 }
4058 }
4059
4060 result = Some((pair.open_range, pair.close_range));
4061 }
4062
4063 result
4064 }
4065
4066 /// Returns anchor ranges for any matches of the redaction query.
4067 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4068 /// will be run on the relevant section of the buffer.
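    ///
    /// A minimal sketch of consuming the iterator (hedged; not run as a doctest):
    ///
    /// ```ignore
    /// // E.g. to mask values in a file whose grammar defines a redactions query.
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     println!("redact bytes {range:?}");
    /// }
    /// ```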
4069 pub fn redacted_ranges<T: ToOffset>(
4070 &self,
4071 range: Range<T>,
4072 ) -> impl Iterator<Item = Range<usize>> + '_ {
4073 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4074 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4075 grammar
4076 .redactions_config
4077 .as_ref()
4078 .map(|config| &config.query)
4079 });
4080
4081 let configs = syntax_matches
4082 .grammars()
4083 .iter()
4084 .map(|grammar| grammar.redactions_config.as_ref())
4085 .collect::<Vec<_>>();
4086
4087 iter::from_fn(move || {
4088 let redacted_range = syntax_matches
4089 .peek()
4090 .and_then(|mat| {
4091 configs[mat.grammar_index].and_then(|config| {
4092 mat.captures
4093 .iter()
4094 .find(|capture| capture.index == config.redaction_capture_ix)
4095 })
4096 })
4097 .map(|mat| mat.node.byte_range());
4098 syntax_matches.advance();
4099 redacted_range
4100 })
4101 }
4102
4103 pub fn injections_intersecting_range<T: ToOffset>(
4104 &self,
4105 range: Range<T>,
4106 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4107 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4108
4109 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4110 grammar
4111 .injection_config
4112 .as_ref()
4113 .map(|config| &config.query)
4114 });
4115
4116 let configs = syntax_matches
4117 .grammars()
4118 .iter()
4119 .map(|grammar| grammar.injection_config.as_ref())
4120 .collect::<Vec<_>>();
4121
4122 iter::from_fn(move || {
4123 let ranges = syntax_matches.peek().and_then(|mat| {
4124 let config = &configs[mat.grammar_index]?;
4125 let content_capture_range = mat.captures.iter().find_map(|capture| {
4126 if capture.index == config.content_capture_ix {
4127 Some(capture.node.byte_range())
4128 } else {
4129 None
4130 }
4131 })?;
4132 let language = self.language_at(content_capture_range.start)?;
4133 Some((content_capture_range, language))
4134 });
4135 syntax_matches.advance();
4136 ranges
4137 })
4138 }
4139
4140 pub fn runnable_ranges(
4141 &self,
4142 offset_range: Range<usize>,
4143 ) -> impl Iterator<Item = RunnableRange> + '_ {
4144 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4145 grammar.runnable_config.as_ref().map(|config| &config.query)
4146 });
4147
4148 let test_configs = syntax_matches
4149 .grammars()
4150 .iter()
4151 .map(|grammar| grammar.runnable_config.as_ref())
4152 .collect::<Vec<_>>();
4153
4154 iter::from_fn(move || {
4155 loop {
4156 let mat = syntax_matches.peek()?;
4157
4158 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4159 let mut run_range = None;
4160 let full_range = mat.captures.iter().fold(
4161 Range {
4162 start: usize::MAX,
4163 end: 0,
4164 },
4165 |mut acc, next| {
4166 let byte_range = next.node.byte_range();
4167 if acc.start > byte_range.start {
4168 acc.start = byte_range.start;
4169 }
4170 if acc.end < byte_range.end {
4171 acc.end = byte_range.end;
4172 }
4173 acc
4174 },
4175 );
4176 if full_range.start > full_range.end {
4177 // We did not find a full spanning range of this match.
4178 return None;
4179 }
4180 let extra_captures: SmallVec<[_; 1]> =
4181 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4182 test_configs
4183 .extra_captures
4184 .get(capture.index as usize)
4185 .cloned()
4186 .and_then(|tag_name| match tag_name {
4187 RunnableCapture::Named(name) => {
4188 Some((capture.node.byte_range(), name))
4189 }
4190 RunnableCapture::Run => {
4191 let _ = run_range.insert(capture.node.byte_range());
4192 None
4193 }
4194 })
4195 }));
4196 let run_range = run_range?;
4197 let tags = test_configs
4198 .query
4199 .property_settings(mat.pattern_index)
4200 .iter()
4201 .filter_map(|property| {
4202 if *property.key == *"tag" {
4203 property
4204 .value
4205 .as_ref()
4206 .map(|value| RunnableTag(value.to_string().into()))
4207 } else {
4208 None
4209 }
4210 })
4211 .collect();
4212 let extra_captures = extra_captures
4213 .into_iter()
4214 .map(|(range, name)| {
4215 (
4216 name.to_string(),
4217 self.text_for_range(range.clone()).collect::<String>(),
4218 )
4219 })
4220 .collect();
4221 // All tags should have the same range.
4222 Some(RunnableRange {
4223 run_range,
4224 full_range,
4225 runnable: Runnable {
4226 tags,
4227 language: mat.language,
4228 buffer: self.remote_id(),
4229 },
4230 extra_captures,
4231 buffer_id: self.remote_id(),
4232 })
4233 });
4234
4235 syntax_matches.advance();
4236 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. But we don't want this iterator
                    // to end just because a match lacked a run marker, so in that case we loop around to the next match.
4239 return test_range;
4240 }
4241 }
4242 })
4243 }
4244
4245 /// Returns selections for remote peers intersecting the given range.
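    ///
    /// A hedged sketch of consuming the nested iterators (assumes `Anchor::MIN`/`Anchor::MAX` from the `text` crate and a `snapshot` in scope; not run as a doctest):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         println!(
    ///             "{replica_id:?} (line_mode: {line_mode}, {cursor_shape:?}): {:?}..{:?}",
    ///             selection.start, selection.end
    ///         );
    ///     }
    /// }
    /// ```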
4246 #[allow(clippy::type_complexity)]
4247 pub fn selections_in_range(
4248 &self,
4249 range: Range<Anchor>,
4250 include_local: bool,
4251 ) -> impl Iterator<
4252 Item = (
4253 ReplicaId,
4254 bool,
4255 CursorShape,
4256 impl Iterator<Item = &Selection<Anchor>> + '_,
4257 ),
4258 > + '_ {
4259 self.remote_selections
4260 .iter()
4261 .filter(move |(replica_id, set)| {
4262 (include_local || **replica_id != self.text.replica_id())
4263 && !set.selections.is_empty()
4264 })
4265 .map(move |(replica_id, set)| {
4266 let start_ix = match set.selections.binary_search_by(|probe| {
4267 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4268 }) {
4269 Ok(ix) | Err(ix) => ix,
4270 };
4271 let end_ix = match set.selections.binary_search_by(|probe| {
4272 probe.start.cmp(&range.end, self).then(Ordering::Less)
4273 }) {
4274 Ok(ix) | Err(ix) => ix,
4275 };
4276
4277 (
4278 *replica_id,
4279 set.line_mode,
4280 set.cursor_shape,
4281 set.selections[start_ix..end_ix].iter(),
4282 )
4283 })
4284 }
4285
    /// Returns whether the buffer contains any diagnostics.
4287 pub fn has_diagnostics(&self) -> bool {
4288 !self.diagnostics.is_empty()
4289 }
4290
4291 /// Returns all the diagnostics intersecting the given range.
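    ///
    /// A minimal sketch, resolving each entry to `usize` offsets as this file does internally (not run as a doctest):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!(
    ///         "{:?} at {:?}: {}",
    ///         entry.diagnostic.severity, entry.range, entry.diagnostic.message
    ///     );
    /// }
    /// ```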
4292 pub fn diagnostics_in_range<'a, T, O>(
4293 &'a self,
4294 search_range: Range<T>,
4295 reversed: bool,
4296 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4297 where
4298 T: 'a + Clone + ToOffset,
4299 O: 'a + FromAnchor,
4300 {
4301 let mut iterators: Vec<_> = self
4302 .diagnostics
4303 .iter()
4304 .map(|(_, collection)| {
4305 collection
4306 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4307 .peekable()
4308 })
4309 .collect();
4310
4311 std::iter::from_fn(move || {
4312 let (next_ix, _) = iterators
4313 .iter_mut()
4314 .enumerate()
4315 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4316 .min_by(|(_, a), (_, b)| {
4317 let cmp = a
4318 .range
4319 .start
4320 .cmp(&b.range.start, self)
4321 // when range is equal, sort by diagnostic severity
4322 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4323 // and stabilize order with group_id
4324 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4325 if reversed { cmp.reverse() } else { cmp }
4326 })?;
4327 iterators[next_ix]
4328 .next()
4329 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4330 diagnostic,
4331 range: FromAnchor::from_anchor(&range.start, self)
4332 ..FromAnchor::from_anchor(&range.end, self),
4333 })
4334 })
4335 }
4336
4337 /// Returns all the diagnostic groups associated with the given
4338 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
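    ///
    /// A hedged sketch (not run as a doctest):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```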
4340 pub fn diagnostic_groups(
4341 &self,
4342 language_server_id: Option<LanguageServerId>,
4343 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4344 let mut groups = Vec::new();
4345
4346 if let Some(language_server_id) = language_server_id {
4347 if let Ok(ix) = self
4348 .diagnostics
4349 .binary_search_by_key(&language_server_id, |e| e.0)
4350 {
4351 self.diagnostics[ix]
4352 .1
4353 .groups(language_server_id, &mut groups, self);
4354 }
4355 } else {
4356 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4357 diagnostics.groups(*language_server_id, &mut groups, self);
4358 }
4359 }
4360
4361 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4362 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4363 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4364 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4365 });
4366
4367 groups
4368 }
4369
4370 /// Returns an iterator over the diagnostics for the given group.
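    ///
    /// A minimal sketch (hedged: `group_id` would come from a previously observed diagnostic; not run as a doctest):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostic_group::<usize>(group_id) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```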
4371 pub fn diagnostic_group<O>(
4372 &self,
4373 group_id: usize,
4374 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4375 where
4376 O: FromAnchor + 'static,
4377 {
4378 self.diagnostics
4379 .iter()
4380 .flat_map(move |(_, set)| set.group(group_id, self))
4381 }
4382
4383 /// An integer version number that accounts for all updates besides
4384 /// the buffer's text itself (which is versioned via a version vector).
4385 pub fn non_text_state_update_count(&self) -> usize {
4386 self.non_text_state_update_count
4387 }
4388
4389 /// An integer version that changes when the buffer's syntax changes.
4390 pub fn syntax_update_count(&self) -> usize {
4391 self.syntax.update_count()
4392 }
4393
    /// Returns a snapshot of the underlying file.
4395 pub fn file(&self) -> Option<&Arc<dyn File>> {
4396 self.file.as_ref()
4397 }
4398
4399 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
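    ///
    /// A hedged sketch (the worktree name and path are hypothetical; not run as a doctest):
    ///
    /// ```ignore
    /// // With a file at `src/lib.rs` in a worktree whose root is named `my-crate`:
    /// assert_eq!(snapshot.resolve_file_path(cx, true), Some(PathBuf::from("my-crate/src/lib.rs")));
    /// assert_eq!(snapshot.resolve_file_path(cx, false), Some(PathBuf::from("src/lib.rs")));
    /// ```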
4400 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4401 if let Some(file) = self.file() {
4402 if file.path().file_name().is_none() || include_root {
4403 Some(file.full_path(cx))
4404 } else {
4405 Some(file.path().to_path_buf())
4406 }
4407 } else {
4408 None
4409 }
4410 }
4411
4412 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4413 let query_str = query.fuzzy_contents;
4414 if query_str.map_or(false, |query| query.is_empty()) {
4415 return BTreeMap::default();
4416 }
4417
4418 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4419 language,
4420 override_id: None,
4421 }));
4422
4423 let mut query_ix = 0;
4424 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4425 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4426
4427 let mut words = BTreeMap::default();
4428 let mut current_word_start_ix = None;
4429 let mut chunk_ix = query.range.start;
4430 for chunk in self.chunks(query.range, false) {
4431 for (i, c) in chunk.text.char_indices() {
4432 let ix = chunk_ix + i;
4433 if classifier.is_word(c) {
4434 if current_word_start_ix.is_none() {
4435 current_word_start_ix = Some(ix);
4436 }
4437
4438 if let Some(query_chars) = &query_chars {
4439 if query_ix < query_len {
4440 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4441 query_ix += 1;
4442 }
4443 }
4444 }
4445 continue;
4446 } else if let Some(word_start) = current_word_start_ix.take() {
4447 if query_ix == query_len {
4448 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4449 let mut word_text = self.text_for_range(word_start..ix).peekable();
4450 let first_char = word_text
4451 .peek()
4452 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip "words" whose first character is a digit.
4454 if !query.skip_digits
4455 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4456 {
4457 words.insert(word_text.collect(), word_range);
4458 }
4459 }
4460 }
4461 query_ix = 0;
4462 }
4463 chunk_ix += chunk.text.len();
4464 }
4465
4466 words
4467 }
4468}
4469
4470pub struct WordsQuery<'a> {
    /// When set, only returns words that contain all characters of this string, in order.
4472 pub fuzzy_contents: Option<&'a str>,
4473 /// Skips words that start with a digit.
4474 pub skip_digits: bool,
    /// The buffer offset range to search for words.
4476 pub range: Range<usize>,
4477}
4478
4479fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4480 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4481}
4482
4483fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4484 let mut result = IndentSize::spaces(0);
4485 for c in text {
4486 let kind = match c {
4487 ' ' => IndentKind::Space,
4488 '\t' => IndentKind::Tab,
4489 _ => break,
4490 };
4491 if result.len == 0 {
4492 result.kind = kind;
4493 }
4494 result.len += 1;
4495 }
4496 result
4497}
4498
4499impl Clone for BufferSnapshot {
4500 fn clone(&self) -> Self {
4501 Self {
4502 text: self.text.clone(),
4503 syntax: self.syntax.clone(),
4504 file: self.file.clone(),
4505 remote_selections: self.remote_selections.clone(),
4506 diagnostics: self.diagnostics.clone(),
4507 language: self.language.clone(),
4508 non_text_state_update_count: self.non_text_state_update_count,
4509 }
4510 }
4511}
4512
4513impl Deref for BufferSnapshot {
4514 type Target = text::BufferSnapshot;
4515
4516 fn deref(&self) -> &Self::Target {
4517 &self.text
4518 }
4519}
4520
4521unsafe impl Send for BufferChunks<'_> {}
4522
4523impl<'a> BufferChunks<'a> {
4524 pub(crate) fn new(
4525 text: &'a Rope,
4526 range: Range<usize>,
4527 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4528 diagnostics: bool,
4529 buffer_snapshot: Option<&'a BufferSnapshot>,
4530 ) -> Self {
4531 let mut highlights = None;
4532 if let Some((captures, highlight_maps)) = syntax {
4533 highlights = Some(BufferChunkHighlights {
4534 captures,
4535 next_capture: None,
4536 stack: Default::default(),
4537 highlight_maps,
4538 })
4539 }
4540
4541 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4542 let chunks = text.chunks_in_range(range.clone());
4543
4544 let mut this = BufferChunks {
4545 range,
4546 buffer_snapshot,
4547 chunks,
4548 diagnostic_endpoints,
4549 error_depth: 0,
4550 warning_depth: 0,
4551 information_depth: 0,
4552 hint_depth: 0,
4553 unnecessary_depth: 0,
4554 underline: true,
4555 highlights,
4556 };
4557 this.initialize_diagnostic_endpoints();
4558 this
4559 }
4560
    /// Seeks to the given byte range in the buffer.
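    ///
    /// A minimal sketch of reusing one chunk iterator for several sub-ranges, as the outline code above does (hedged: offsets are arbitrary valid byte offsets; not run as a doctest):
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// chunks.seek(10..20);
    /// let text: String = chunks.by_ref().map(|chunk| chunk.text).collect();
    /// ```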
4562 pub fn seek(&mut self, range: Range<usize>) {
4563 let old_range = std::mem::replace(&mut self.range, range.clone());
4564 self.chunks.set_range(self.range.clone());
4565 if let Some(highlights) = self.highlights.as_mut() {
4566 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4567 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4568 highlights
4569 .stack
4570 .retain(|(end_offset, _)| *end_offset > range.start);
4571 if let Some(capture) = &highlights.next_capture {
4572 if range.start >= capture.node.start_byte() {
4573 let next_capture_end = capture.node.end_byte();
4574 if range.start < next_capture_end {
4575 highlights.stack.push((
4576 next_capture_end,
4577 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4578 ));
4579 }
4580 highlights.next_capture.take();
4581 }
4582 }
4583 } else if let Some(snapshot) = self.buffer_snapshot {
4584 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4585 *highlights = BufferChunkHighlights {
4586 captures,
4587 next_capture: None,
4588 stack: Default::default(),
4589 highlight_maps,
4590 };
4591 } else {
4592 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4593 // Seeking such BufferChunks is not supported.
4594 debug_assert!(
4595 false,
4596 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4597 );
4598 }
4599
4600 highlights.captures.set_byte_range(self.range.clone());
4601 self.initialize_diagnostic_endpoints();
4602 }
4603 }
4604
4605 fn initialize_diagnostic_endpoints(&mut self) {
4606 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4607 if let Some(buffer) = self.buffer_snapshot {
4608 let mut diagnostic_endpoints = Vec::new();
4609 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4610 diagnostic_endpoints.push(DiagnosticEndpoint {
4611 offset: entry.range.start,
4612 is_start: true,
4613 severity: entry.diagnostic.severity,
4614 is_unnecessary: entry.diagnostic.is_unnecessary,
4615 underline: entry.diagnostic.underline,
4616 });
4617 diagnostic_endpoints.push(DiagnosticEndpoint {
4618 offset: entry.range.end,
4619 is_start: false,
4620 severity: entry.diagnostic.severity,
4621 is_unnecessary: entry.diagnostic.is_unnecessary,
4622 underline: entry.diagnostic.underline,
4623 });
4624 }
4625 diagnostic_endpoints
4626 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4627 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4628 self.hint_depth = 0;
4629 self.error_depth = 0;
4630 self.warning_depth = 0;
4631 self.information_depth = 0;
4632 }
4633 }
4634 }
4635
4636 /// The current byte offset in the buffer.
4637 pub fn offset(&self) -> usize {
4638 self.range.start
4639 }
4640
4641 pub fn range(&self) -> Range<usize> {
4642 self.range.clone()
4643 }
4644
4645 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4646 let depth = match endpoint.severity {
4647 DiagnosticSeverity::ERROR => &mut self.error_depth,
4648 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4649 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4650 DiagnosticSeverity::HINT => &mut self.hint_depth,
4651 _ => return,
4652 };
4653 if endpoint.is_start {
4654 *depth += 1;
4655 } else {
4656 *depth -= 1;
4657 }
4658
4659 if endpoint.is_unnecessary {
4660 if endpoint.is_start {
4661 self.unnecessary_depth += 1;
4662 } else {
4663 self.unnecessary_depth -= 1;
4664 }
4665 }
4666 }
4667
4668 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4669 if self.error_depth > 0 {
4670 Some(DiagnosticSeverity::ERROR)
4671 } else if self.warning_depth > 0 {
4672 Some(DiagnosticSeverity::WARNING)
4673 } else if self.information_depth > 0 {
4674 Some(DiagnosticSeverity::INFORMATION)
4675 } else if self.hint_depth > 0 {
4676 Some(DiagnosticSeverity::HINT)
4677 } else {
4678 None
4679 }
4680 }
4681
4682 fn current_code_is_unnecessary(&self) -> bool {
4683 self.unnecessary_depth > 0
4684 }
4685}
4686
4687impl<'a> Iterator for BufferChunks<'a> {
4688 type Item = Chunk<'a>;
4689
4690 fn next(&mut self) -> Option<Self::Item> {
4691 let mut next_capture_start = usize::MAX;
4692 let mut next_diagnostic_endpoint = usize::MAX;
4693
4694 if let Some(highlights) = self.highlights.as_mut() {
4695 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4696 if *parent_capture_end <= self.range.start {
4697 highlights.stack.pop();
4698 } else {
4699 break;
4700 }
4701 }
4702
4703 if highlights.next_capture.is_none() {
4704 highlights.next_capture = highlights.captures.next();
4705 }
4706
4707 while let Some(capture) = highlights.next_capture.as_ref() {
4708 if self.range.start < capture.node.start_byte() {
4709 next_capture_start = capture.node.start_byte();
4710 break;
4711 } else {
4712 let highlight_id =
4713 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4714 highlights
4715 .stack
4716 .push((capture.node.end_byte(), highlight_id));
4717 highlights.next_capture = highlights.captures.next();
4718 }
4719 }
4720 }
4721
4722 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4723 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4724 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4725 if endpoint.offset <= self.range.start {
4726 self.update_diagnostic_depths(endpoint);
4727 diagnostic_endpoints.next();
4728 self.underline = endpoint.underline;
4729 } else {
4730 next_diagnostic_endpoint = endpoint.offset;
4731 break;
4732 }
4733 }
4734 }
4735 self.diagnostic_endpoints = diagnostic_endpoints;
4736
4737 if let Some(chunk) = self.chunks.peek() {
4738 let chunk_start = self.range.start;
4739 let mut chunk_end = (self.chunks.offset() + chunk.len())
4740 .min(next_capture_start)
4741 .min(next_diagnostic_endpoint);
4742 let mut highlight_id = None;
4743 if let Some(highlights) = self.highlights.as_ref() {
4744 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4745 chunk_end = chunk_end.min(*parent_capture_end);
4746 highlight_id = Some(*parent_highlight_id);
4747 }
4748 }
4749
4750 let slice =
4751 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4752 self.range.start = chunk_end;
4753 if self.range.start == self.chunks.offset() + chunk.len() {
4754 self.chunks.next().unwrap();
4755 }
4756
4757 Some(Chunk {
4758 text: slice,
4759 syntax_highlight_id: highlight_id,
4760 underline: self.underline,
4761 diagnostic_severity: self.current_diagnostic_severity(),
4762 is_unnecessary: self.current_code_is_unnecessary(),
4763 ..Chunk::default()
4764 })
4765 } else {
4766 None
4767 }
4768 }
4769}
4770
4771impl operation_queue::Operation for Operation {
4772 fn lamport_timestamp(&self) -> clock::Lamport {
4773 match self {
4774 Operation::Buffer(_) => {
4775 unreachable!("buffer operations should never be deferred at this layer")
4776 }
4777 Operation::UpdateDiagnostics {
4778 lamport_timestamp, ..
4779 }
4780 | Operation::UpdateSelections {
4781 lamport_timestamp, ..
4782 }
4783 | Operation::UpdateCompletionTriggers {
4784 lamport_timestamp, ..
4785 } => *lamport_timestamp,
4786 }
4787 }
4788}
4789
4790impl Default for Diagnostic {
4791 fn default() -> Self {
4792 Self {
4793 source: Default::default(),
4794 source_kind: DiagnosticSourceKind::Other,
4795 code: None,
4796 code_description: None,
4797 severity: DiagnosticSeverity::ERROR,
4798 message: Default::default(),
4799 markdown: None,
4800 group_id: 0,
4801 is_primary: false,
4802 is_disk_based: false,
4803 is_unnecessary: false,
4804 underline: true,
4805 data: None,
4806 }
4807 }
4808}
4809
4810impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4812 pub fn spaces(len: u32) -> Self {
4813 Self {
4814 len,
4815 kind: IndentKind::Space,
4816 }
4817 }
4818
4819 /// Returns an [`IndentSize`] representing a tab.
4820 pub fn tab() -> Self {
4821 Self {
4822 len: 1,
4823 kind: IndentKind::Tab,
4824 }
4825 }
4826
4827 /// An iterator over the characters represented by this [`IndentSize`].
4828 pub fn chars(&self) -> impl Iterator<Item = char> {
4829 iter::repeat(self.char()).take(self.len as usize)
4830 }
4831
4832 /// The character representation of this [`IndentSize`].
4833 pub fn char(&self) -> char {
4834 match self.kind {
4835 IndentKind::Space => ' ',
4836 IndentKind::Tab => '\t',
4837 }
4838 }
4839
4840 /// Consumes the current [`IndentSize`] and returns a new one that has
4841 /// been shrunk or enlarged by the given size along the given direction.
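    ///
    /// A minimal sketch (hedged: assumes the `len` field is readable here and `std::cmp::Ordering` is in scope; not run as a doctest):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4);
    /// // Growing by 4 more spaces yields an 8-space indent.
    /// let grown = indent.with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    /// // Shrinking by 4 spaces brings it back to 4.
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 4);
    /// ```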
4842 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4843 match direction {
4844 Ordering::Less => {
4845 if self.kind == size.kind && self.len >= size.len {
4846 self.len -= size.len;
4847 }
4848 }
4849 Ordering::Equal => {}
4850 Ordering::Greater => {
4851 if self.len == 0 {
4852 self = size;
4853 } else if self.kind == size.kind {
4854 self.len += size.len;
4855 }
4856 }
4857 }
4858 self
4859 }
4860
4861 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4862 match self.kind {
4863 IndentKind::Space => self.len as usize,
4864 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4865 }
4866 }
4867}
4868
4869#[cfg(any(test, feature = "test-support"))]
4870pub struct TestFile {
4871 pub path: Arc<Path>,
4872 pub root_name: String,
4873 pub local_root: Option<PathBuf>,
4874}
4875
4876#[cfg(any(test, feature = "test-support"))]
4877impl File for TestFile {
4878 fn path(&self) -> &Arc<Path> {
4879 &self.path
4880 }
4881
4882 fn full_path(&self, _: &gpui::App) -> PathBuf {
4883 PathBuf::from(&self.root_name).join(self.path.as_ref())
4884 }
4885
4886 fn as_local(&self) -> Option<&dyn LocalFile> {
4887 if self.local_root.is_some() {
4888 Some(self)
4889 } else {
4890 None
4891 }
4892 }
4893
4894 fn disk_state(&self) -> DiskState {
4895 unimplemented!()
4896 }
4897
4898 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4899 self.path().file_name().unwrap_or(self.root_name.as_ref())
4900 }
4901
4902 fn worktree_id(&self, _: &App) -> WorktreeId {
4903 WorktreeId::from_usize(0)
4904 }
4905
4906 fn to_proto(&self, _: &App) -> rpc::proto::File {
4907 unimplemented!()
4908 }
4909
4910 fn is_private(&self) -> bool {
4911 false
4912 }
4913}
4914
4915#[cfg(any(test, feature = "test-support"))]
4916impl LocalFile for TestFile {
4917 fn abs_path(&self, _cx: &App) -> PathBuf {
4918 PathBuf::from(self.local_root.as_ref().unwrap())
4919 .join(&self.root_name)
4920 .join(self.path.as_ref())
4921 }
4922
4923 fn load(&self, _cx: &App) -> Task<Result<String>> {
4924 unimplemented!()
4925 }
4926
4927 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4928 unimplemented!()
4929 }
4930}
4931
4932pub(crate) fn contiguous_ranges(
4933 values: impl Iterator<Item = u32>,
4934 max_len: usize,
4935) -> impl Iterator<Item = Range<u32>> {
4936 let mut values = values;
4937 let mut current_range: Option<Range<u32>> = None;
4938 std::iter::from_fn(move || {
4939 loop {
4940 if let Some(value) = values.next() {
4941 if let Some(range) = &mut current_range {
4942 if value == range.end && range.len() < max_len {
4943 range.end += 1;
4944 continue;
4945 }
4946 }
4947
4948 let prev_range = current_range.clone();
4949 current_range = Some(value..(value + 1));
4950 if prev_range.is_some() {
4951 return prev_range;
4952 }
4953 } else {
4954 return current_range.take();
4955 }
4956 }
4957 })
4958}
4959
4960#[derive(Default, Debug)]
4961pub struct CharClassifier {
4962 scope: Option<LanguageScope>,
4963 for_completion: bool,
4964 ignore_punctuation: bool,
4965}
4966
4967impl CharClassifier {
4968 pub fn new(scope: Option<LanguageScope>) -> Self {
4969 Self {
4970 scope,
4971 for_completion: false,
4972 ignore_punctuation: false,
4973 }
4974 }
4975
4976 pub fn for_completion(self, for_completion: bool) -> Self {
4977 Self {
4978 for_completion,
4979 ..self
4980 }
4981 }
4982
4983 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4984 Self {
4985 ignore_punctuation,
4986 ..self
4987 }
4988 }
4989
4990 pub fn is_whitespace(&self, c: char) -> bool {
4991 self.kind(c) == CharKind::Whitespace
4992 }
4993
4994 pub fn is_word(&self, c: char) -> bool {
4995 self.kind(c) == CharKind::Word
4996 }
4997
4998 pub fn is_punctuation(&self, c: char) -> bool {
4999 self.kind(c) == CharKind::Punctuation
5000 }
5001
5002 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5003 if c.is_alphanumeric() || c == '_' {
5004 return CharKind::Word;
5005 }
5006
5007 if let Some(scope) = &self.scope {
5008 let characters = if self.for_completion {
5009 scope.completion_query_characters()
5010 } else {
5011 scope.word_characters()
5012 };
5013 if let Some(characters) = characters {
5014 if characters.contains(&c) {
5015 return CharKind::Word;
5016 }
5017 }
5018 }
5019
5020 if c.is_whitespace() {
5021 return CharKind::Whitespace;
5022 }
5023
5024 if ignore_punctuation {
5025 CharKind::Word
5026 } else {
5027 CharKind::Punctuation
5028 }
5029 }
5030
5031 pub fn kind(&self, c: char) -> CharKind {
5032 self.kind_with(c, self.ignore_punctuation)
5033 }
5034}
5035
5036/// Find all of the ranges of whitespace that occur at the ends of lines
5037/// in the given rope.
5038///
5039/// This could also be done with a regex search, but this implementation
5040/// avoids copying text.
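///
/// A minimal sketch of the expected output (hedged: assumes `Rope` implements `From<&str>`; marked `ignore` so it is not run as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // One range for the spaces after `{`, one for the tab after `1;`.
/// assert_eq!(ranges.len(), 2);
/// ```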
5041pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5042 let mut ranges = Vec::new();
5043
5044 let mut offset = 0;
5045 let mut prev_chunk_trailing_whitespace_range = 0..0;
5046 for chunk in rope.chunks() {
5047 let mut prev_line_trailing_whitespace_range = 0..0;
5048 for (i, line) in chunk.split('\n').enumerate() {
5049 let line_end_offset = offset + line.len();
5050 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5051 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5052
5053 if i == 0 && trimmed_line_len == 0 {
5054 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5055 }
5056 if !prev_line_trailing_whitespace_range.is_empty() {
5057 ranges.push(prev_line_trailing_whitespace_range);
5058 }
5059
5060 offset = line_end_offset + 1;
5061 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5062 }
5063
5064 offset -= 1;
5065 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5066 }
5067
5068 if !prev_chunk_trailing_whitespace_range.is_empty() {
5069 ranges.push(prev_chunk_trailing_whitespace_range);
5070 }
5071
5072 ranges
5073}