1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] may be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
    /// The file backing this buffer; `None` when the buffer has no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
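///
/// A minimal construction sketch (the fields are public, so a struct literal suffices):
/// ```ignore
/// // Four spaces of indentation.
/// let indent = IndentSize { len: 4, kind: IndentKind::Space };
/// ```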
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
233 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the
    /// server when code actions are requested for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
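///
/// A subscription sketch (illustrative; `MyView` and `buffer` are hypothetical, and the
/// closure signature assumes a GPUI `Context<MyView>`):
/// ```ignore
/// cx.subscribe(&buffer, |_this: &mut MyView, _buffer, event, _cx| match event {
///     BufferEvent::Edited => { /* refresh state derived from the text */ }
///     BufferEvent::Saved => { /* clear the dirty indicator */ }
///     _ => {}
/// })
/// .detach();
/// ```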
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer needs to be reloaded.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
    /// Returns whether the file is new, present in storage, or has been deleted. Includes
    /// metadata that is only available in some of these states, such as the modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
    /// Returns whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
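///
/// A sketch of how a caller might branch on this state (`file` is assumed to be an
/// `Arc<dyn File>`):
/// ```ignore
/// let show_deleted_indicator = matches!(file.disk_state(), DiskState::Deleted);
/// let has_disk_copy = file.disk_state().exists();
/// ```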
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
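///
/// A usage sketch (illustrative; `buffer`, `range`, `pasted_text`, and `cx` are assumed
/// from the caller's context):
/// ```ignore
/// // Re-indent each inserted line independently:
/// buffer.edit(
///     [(range.clone(), "if x {\n    y();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Or shift a pasted block uniformly, preserving its internal indentation:
/// buffer.edit(
///     [(range, pasted_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```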
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the
        /// insertion will have its indentation adjusted by `b - a` columns.
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay rather than from the buffer's text.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
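///
/// Typical flow (a sketch; `buffer` is an `Entity<Buffer>` and this runs in an async
/// context, mirroring how `reload` uses `diff` and `apply_diff` later in this file):
/// ```ignore
/// let diff = buffer.read(cx).diff(new_text, cx).await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
/// ```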
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
/// A runnable is a set of data about a region that could be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether more lines follow.
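    ///
    /// A usage sketch:
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```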
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 &syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 &syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 &syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 &syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
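    ///
    /// A construction sketch (assumes a GPUI context that can create entities, e.g.
    /// `cx: &mut App`):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```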
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
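    ///
    /// For example (a sketch; `buffer` is an `Entity<Buffer>`):
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Work against the immutable snapshot, off the main thread.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```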
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
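    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer.
    ///
    /// A usage sketch (assumes an async context; `edits` is an
    /// `Arc<[(Range<Anchor>, String)]>` and `text_style` is a `TextStyle`):
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let snapshot = buffer.read(cx).snapshot();
    /// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
    /// let element = highlighted.to_styled_text(&text_style);
    /// ```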
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
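    ///
    /// A sketch of the branch/merge round trip (`buffer` is an `Entity<Buffer>`):
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty `ranges` vector merges every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```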
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation {
1162 if let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169 }
1170
1171 fn on_base_buffer_event(
1172 &mut self,
1173 _: Entity<Buffer>,
1174 event: &BufferEvent,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let BufferEvent::Operation { operation, .. } = event else {
1178 return;
1179 };
1180 let Some(BufferBranchState {
1181 merged_operations, ..
1182 }) = &mut self.branch_state
1183 else {
1184 return;
1185 };
1186
1187 let mut operation_to_undo = None;
1188 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1189 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193 }
1194
1195 self.apply_ops([operation.clone()], cx);
1196
1197 if let Some(timestamp) = operation_to_undo {
1198 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1199 self.undo_operations(counts, cx);
1200 }
1201 }
1202
1203 #[cfg(test)]
1204 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1205 &self.text
1206 }
1207
1208 /// Retrieve a snapshot of the buffer's raw text, without any
1209 /// language-related state like the syntax tree or diagnostics.
1210 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1211 self.text.snapshot()
1212 }
1213
1214 /// The file associated with the buffer, if any.
1215 pub fn file(&self) -> Option<&Arc<dyn File>> {
1216 self.file.as_ref()
1217 }
1218
1219 /// The version of the buffer that was last saved or reloaded from disk.
1220 pub fn saved_version(&self) -> &clock::Global {
1221 &self.saved_version
1222 }
1223
1224 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1225 pub fn saved_mtime(&self) -> Option<MTime> {
1226 self.saved_mtime
1227 }
1228
1229 /// Assign a language to the buffer.
1230 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1231 self.non_text_state_update_count += 1;
1232 self.syntax_map.lock().clear(&self.text);
1233 self.language = language;
1234 self.was_changed();
1235 self.reparse(cx);
1236 cx.emit(BufferEvent::LanguageChanged);
1237 }
1238
1239 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1240 /// other languages if parts of the buffer are written in different languages.
1241 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1242 self.syntax_map
1243 .lock()
1244 .set_language_registry(language_registry);
1245 }
1246
1247 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1248 self.syntax_map.lock().language_registry()
1249 }
1250
1251 /// Assign the buffer a new [`Capability`].
1252 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1253 self.capability = capability;
1254 cx.emit(BufferEvent::CapabilityChanged)
1255 }
1256
1257 /// This method is called to signal that the buffer has been saved.
1258 pub fn did_save(
1259 &mut self,
1260 version: clock::Global,
1261 mtime: Option<MTime>,
1262 cx: &mut Context<Self>,
1263 ) {
1264 self.saved_version = version;
1265 self.has_unsaved_edits
1266 .set((self.saved_version().clone(), false));
1267 self.has_conflict = false;
1268 self.saved_mtime = mtime;
1269 self.was_changed();
1270 cx.emit(BufferEvent::Saved);
1271 cx.notify();
1272 }
1273
1274 /// This method is called to signal that the buffer has been discarded.
1275 pub fn discarded(&self, cx: &mut Context<Self>) {
1276 cx.emit(BufferEvent::Discarded);
1277 cx.notify();
1278 }
1279
1280 /// Reloads the contents of the buffer from disk.
1281 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1282 let (tx, rx) = futures::channel::oneshot::channel();
1283 let prev_version = self.text.version();
1284 self.reload_task = Some(cx.spawn(async move |this, cx| {
1285 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1286 let file = this.file.as_ref()?.as_local()?;
1287
1288 Some((file.disk_state().mtime(), file.load(cx)))
1289 })?
1290 else {
1291 return Ok(());
1292 };
1293
1294 let new_text = new_text.await?;
1295 let diff = this
1296 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1297 .await;
1298 this.update(cx, |this, cx| {
1299 if this.version() == diff.base_version {
1300 this.finalize_last_transaction();
1301 this.apply_diff(diff, cx);
1302 tx.send(this.finalize_last_transaction().cloned()).ok();
1303 this.has_conflict = false;
1304 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1305 } else {
1306 if !diff.edits.is_empty()
1307 || this
1308 .edits_since::<usize>(&diff.base_version)
1309 .next()
1310 .is_some()
1311 {
1312 this.has_conflict = true;
1313 }
1314
1315 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1316 }
1317
1318 this.reload_task.take();
1319 })
1320 }));
1321 rx
1322 }
1323
1324 /// This method is called to signal that the buffer has been reloaded.
1325 pub fn did_reload(
1326 &mut self,
1327 version: clock::Global,
1328 line_ending: LineEnding,
1329 mtime: Option<MTime>,
1330 cx: &mut Context<Self>,
1331 ) {
1332 self.saved_version = version;
1333 self.has_unsaved_edits
1334 .set((self.saved_version.clone(), false));
1335 self.text.set_line_ending(line_ending);
1336 self.saved_mtime = mtime;
1337 cx.emit(BufferEvent::Reloaded);
1338 cx.notify();
1339 }
1340
1341 /// Updates the [`File`] backing this buffer. This should be called when
1342 /// the file has changed or has been deleted.
1343 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1344 let was_dirty = self.is_dirty();
1345 let mut file_changed = false;
1346
1347 if let Some(old_file) = self.file.as_ref() {
1348 if new_file.path() != old_file.path() {
1349 file_changed = true;
1350 }
1351
1352 let old_state = old_file.disk_state();
1353 let new_state = new_file.disk_state();
1354 if old_state != new_state {
1355 file_changed = true;
1356 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1357 cx.emit(BufferEvent::ReloadNeeded)
1358 }
1359 }
1360 } else {
1361 file_changed = true;
1362 };
1363
1364 self.file = Some(new_file);
1365 if file_changed {
1366 self.was_changed();
1367 self.non_text_state_update_count += 1;
1368 if was_dirty != self.is_dirty() {
1369 cx.emit(BufferEvent::DirtyChanged);
1370 }
1371 cx.emit(BufferEvent::FileHandleChanged);
1372 cx.notify();
1373 }
1374 }
1375
1376 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1377 Some(self.branch_state.as_ref()?.base_buffer.clone())
1378 }
1379
1380 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1381 pub fn language(&self) -> Option<&Arc<Language>> {
1382 self.language.as_ref()
1383 }
1384
1385 /// Returns the [`Language`] at the given location.
1386 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1387 let offset = position.to_offset(self);
1388 let mut is_first = true;
1389 let start_anchor = self.anchor_before(offset);
1390 let end_anchor = self.anchor_after(offset);
1391 self.syntax_map
1392 .lock()
1393 .layers_for_range(offset..offset, &self.text, false)
1394 .filter(|layer| {
1395 if is_first {
1396 is_first = false;
1397 return true;
1398 }
1399 let any_sub_ranges_contain_range = layer
1400 .included_sub_ranges
1401 .map(|sub_ranges| {
1402 sub_ranges.iter().any(|sub_range| {
1403 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1404 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1405 !is_before_start && !is_after_end
1406 })
1407 })
1408 .unwrap_or(true);
1409 let result = any_sub_ranges_contain_range;
1410 return result;
1411 })
1412 .last()
1413 .map(|info| info.language.clone())
1414 .or_else(|| self.language.clone())
1415 }
1416
1417 /// Returns each [`Language`] for the active syntax layers at the given location.
1418 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1419 let offset = position.to_offset(self);
1420 let mut languages: Vec<Arc<Language>> = self
1421 .syntax_map
1422 .lock()
1423 .layers_for_range(offset..offset, &self.text, false)
1424 .map(|info| info.language.clone())
1425 .collect();
1426
1427 if languages.is_empty() {
1428 if let Some(buffer_language) = self.language() {
1429 languages.push(buffer_language.clone());
1430 }
1431 }
1432
1433 languages
1434 }
1435
1436 /// An integer version number that accounts for all updates besides
1437 /// the buffer's text itself (which is versioned via a version vector).
1438 pub fn non_text_state_update_count(&self) -> usize {
1439 self.non_text_state_update_count
1440 }
1441
1442 /// Whether the buffer is being parsed in the background.
1443 #[cfg(any(test, feature = "test-support"))]
1444 pub fn is_parsing(&self) -> bool {
1445 self.reparse.is_some()
1446 }
1447
1448 /// Indicates whether the buffer contains any regions that may be
1449 /// written in a language that hasn't been loaded yet.
1450 pub fn contains_unknown_injections(&self) -> bool {
1451 self.syntax_map.lock().contains_unknown_injections()
1452 }
1453
1454 #[cfg(any(test, feature = "test-support"))]
1455 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1456 self.sync_parse_timeout = timeout;
1457 }
1458
1459 /// Called after an edit to synchronize the buffer's main parse tree with
1460 /// the buffer's new underlying state.
1461 ///
1462 /// Locks the syntax map and interpolates the edits since the last reparse
1463 /// into the foreground syntax tree.
1464 ///
1465 /// Then takes a stable snapshot of the syntax map before unlocking it.
1466 /// The snapshot with the interpolated edits is sent to a background thread,
1467 /// where we ask Tree-sitter to perform an incremental parse.
1468 ///
1469 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1470 /// waiting on the parse to complete. As soon as it completes, we proceed
1471 /// synchronously, unless a 1ms timeout elapses.
1472 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the newly parsed state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1482 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1483 if self.reparse.is_some() {
1484 return;
1485 }
1486 let language = if let Some(language) = self.language.clone() {
1487 language
1488 } else {
1489 return;
1490 };
1491
1492 let text = self.text_snapshot();
1493 let parsed_version = self.version();
1494
1495 let mut syntax_map = self.syntax_map.lock();
1496 syntax_map.interpolate(&text);
1497 let language_registry = syntax_map.language_registry();
1498 let mut syntax_snapshot = syntax_map.snapshot();
1499 drop(syntax_map);
1500
1501 let parse_task = cx.background_spawn({
1502 let language = language.clone();
1503 let language_registry = language_registry.clone();
1504 async move {
1505 syntax_snapshot.reparse(&text, language_registry, language);
1506 syntax_snapshot
1507 }
1508 });
1509
1510 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1511 match cx
1512 .background_executor()
1513 .block_with_timeout(self.sync_parse_timeout, parse_task)
1514 {
1515 Ok(new_syntax_snapshot) => {
1516 self.did_finish_parsing(new_syntax_snapshot, cx);
1517 self.reparse = None;
1518 }
1519 Err(parse_task) => {
1520 self.reparse = Some(cx.spawn(async move |this, cx| {
1521 let new_syntax_map = parse_task.await;
1522 this.update(cx, move |this, cx| {
1523 let grammar_changed =
1524 this.language.as_ref().map_or(true, |current_language| {
1525 !Arc::ptr_eq(&language, current_language)
1526 });
1527 let language_registry_changed = new_syntax_map
1528 .contains_unknown_injections()
1529 && language_registry.map_or(false, |registry| {
1530 registry.version() != new_syntax_map.language_registry_version()
1531 });
1532 let parse_again = language_registry_changed
1533 || grammar_changed
1534 || this.version.changed_since(&parsed_version);
1535 this.did_finish_parsing(new_syntax_map, cx);
1536 this.reparse = None;
1537 if parse_again {
1538 this.reparse(cx);
1539 }
1540 })
1541 .ok();
1542 }));
1543 }
1544 }
1545 }
1546
1547 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1548 self.was_changed();
1549 self.non_text_state_update_count += 1;
1550 self.syntax_map.lock().did_parse(syntax_snapshot);
1551 self.request_autoindent(cx);
1552 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1553 cx.emit(BufferEvent::Reparsed);
1554 cx.notify();
1555 }
1556
1557 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1558 self.parse_status.1.clone()
1559 }
1560
1561 /// Assign to the buffer a set of diagnostics created by a given language server.
1562 pub fn update_diagnostics(
1563 &mut self,
1564 server_id: LanguageServerId,
1565 diagnostics: DiagnosticSet,
1566 cx: &mut Context<Self>,
1567 ) {
1568 let lamport_timestamp = self.text.lamport_clock.tick();
1569 let op = Operation::UpdateDiagnostics {
1570 server_id,
1571 diagnostics: diagnostics.iter().cloned().collect(),
1572 lamport_timestamp,
1573 };
1574 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1575 self.send_operation(op, true, cx);
1576 }
1577
1578 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1579 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1580 return None;
1581 };
1582 Some(&self.diagnostics[idx].1)
1583 }
1584
1585 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1586 if let Some(indent_sizes) = self.compute_autoindents() {
1587 let indent_sizes = cx.background_spawn(indent_sizes);
1588 match cx
1589 .background_executor()
1590 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1591 {
1592 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1593 Err(indent_sizes) => {
1594 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1595 let indent_sizes = indent_sizes.await;
1596 this.update(cx, |this, cx| {
1597 this.apply_autoindents(indent_sizes, cx);
1598 })
1599 .ok();
1600 }));
1601 }
1602 }
1603 } else {
1604 self.autoindent_requests.clear();
1605 for tx in self.wait_for_autoindent_txs.drain(..) {
1606 tx.send(()).ok();
1607 }
1608 }
1609 }
1610
1611 fn compute_autoindents(
1612 &self,
1613 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1614 let max_rows_between_yields = 100;
1615 let snapshot = self.snapshot();
1616 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1617 return None;
1618 }
1619
1620 let autoindent_requests = self.autoindent_requests.clone();
1621 Some(async move {
1622 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1623 for request in autoindent_requests {
1624 // Resolve each edited range to its row in the current buffer and in the
1625 // buffer before this batch of edits.
1626 let mut row_ranges = Vec::new();
1627 let mut old_to_new_rows = BTreeMap::new();
1628 let mut language_indent_sizes_by_new_row = Vec::new();
1629 for entry in &request.entries {
1630 let position = entry.range.start;
1631 let new_row = position.to_point(&snapshot).row;
1632 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1633 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1634
1635 if !entry.first_line_is_new {
1636 let old_row = position.to_point(&request.before_edit).row;
1637 old_to_new_rows.insert(old_row, new_row);
1638 }
1639 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1640 }
1641
1642 // Build a map containing the suggested indentation for each of the edited lines
1643 // with respect to the state of the buffer before these edits. This map is keyed
1644 // by the rows for these lines in the current state of the buffer.
1645 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1646 let old_edited_ranges =
1647 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1648 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1649 let mut language_indent_size = IndentSize::default();
1650 for old_edited_range in old_edited_ranges {
1651 let suggestions = request
1652 .before_edit
1653 .suggest_autoindents(old_edited_range.clone())
1654 .into_iter()
1655 .flatten();
1656 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1657 if let Some(suggestion) = suggestion {
1658 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1659
1660 // Find the indent size based on the language for this row.
1661 while let Some((row, size)) = language_indent_sizes.peek() {
1662 if *row > new_row {
1663 break;
1664 }
1665 language_indent_size = *size;
1666 language_indent_sizes.next();
1667 }
1668
1669 let suggested_indent = old_to_new_rows
1670 .get(&suggestion.basis_row)
1671 .and_then(|from_row| {
1672 Some(old_suggestions.get(from_row).copied()?.0)
1673 })
1674 .unwrap_or_else(|| {
1675 request
1676 .before_edit
1677 .indent_size_for_line(suggestion.basis_row)
1678 })
1679 .with_delta(suggestion.delta, language_indent_size);
1680 old_suggestions
1681 .insert(new_row, (suggested_indent, suggestion.within_error));
1682 }
1683 }
1684 yield_now().await;
1685 }
1686
1687 // Compute new suggestions for each line, but only include them in the result
1688 // if they differ from the old suggestion for that line.
1689 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1690 let mut language_indent_size = IndentSize::default();
1691 for (row_range, original_indent_column) in row_ranges {
1692 let new_edited_row_range = if request.is_block_mode {
1693 row_range.start..row_range.start + 1
1694 } else {
1695 row_range.clone()
1696 };
1697
1698 let suggestions = snapshot
1699 .suggest_autoindents(new_edited_row_range.clone())
1700 .into_iter()
1701 .flatten();
1702 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1703 if let Some(suggestion) = suggestion {
1704 // Find the indent size based on the language for this row.
1705 while let Some((row, size)) = language_indent_sizes.peek() {
1706 if *row > new_row {
1707 break;
1708 }
1709 language_indent_size = *size;
1710 language_indent_sizes.next();
1711 }
1712
1713 let suggested_indent = indent_sizes
1714 .get(&suggestion.basis_row)
1715 .copied()
1716 .map(|e| e.0)
1717 .unwrap_or_else(|| {
1718 snapshot.indent_size_for_line(suggestion.basis_row)
1719 })
1720 .with_delta(suggestion.delta, language_indent_size);
1721
1722 if old_suggestions.get(&new_row).map_or(
1723 true,
1724 |(old_indentation, was_within_error)| {
1725 suggested_indent != *old_indentation
1726 && (!suggestion.within_error || *was_within_error)
1727 },
1728 ) {
1729 indent_sizes.insert(
1730 new_row,
1731 (suggested_indent, request.ignore_empty_lines),
1732 );
1733 }
1734 }
1735 }
1736
1737 if let (true, Some(original_indent_column)) =
1738 (request.is_block_mode, original_indent_column)
1739 {
1740 let new_indent =
1741 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1742 *indent
1743 } else {
1744 snapshot.indent_size_for_line(row_range.start)
1745 };
1746 let delta = new_indent.len as i64 - original_indent_column as i64;
1747 if delta != 0 {
1748 for row in row_range.skip(1) {
1749 indent_sizes.entry(row).or_insert_with(|| {
1750 let mut size = snapshot.indent_size_for_line(row);
1751 if size.kind == new_indent.kind {
1752 match delta.cmp(&0) {
1753 Ordering::Greater => size.len += delta as u32,
1754 Ordering::Less => {
1755 size.len = size.len.saturating_sub(-delta as u32)
1756 }
1757 Ordering::Equal => {}
1758 }
1759 }
1760 (size, request.ignore_empty_lines)
1761 });
1762 }
1763 }
1764 }
1765
1766 yield_now().await;
1767 }
1768 }
1769
1770 indent_sizes
1771 .into_iter()
1772 .filter_map(|(row, (indent, ignore_empty_lines))| {
1773 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1774 None
1775 } else {
1776 Some((row, indent))
1777 }
1778 })
1779 .collect()
1780 })
1781 }
1782
1783 fn apply_autoindents(
1784 &mut self,
1785 indent_sizes: BTreeMap<u32, IndentSize>,
1786 cx: &mut Context<Self>,
1787 ) {
1788 self.autoindent_requests.clear();
1789 for tx in self.wait_for_autoindent_txs.drain(..) {
1790 tx.send(()).ok();
1791 }
1792
1793 let edits: Vec<_> = indent_sizes
1794 .into_iter()
1795 .filter_map(|(row, indent_size)| {
1796 let current_size = indent_size_for_line(self, row);
1797 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1798 })
1799 .collect();
1800
1801 let preserve_preview = self.preserve_preview();
1802 self.edit(edits, None, cx);
1803 if preserve_preview {
1804 self.refresh_preview();
1805 }
1806 }
1807
1808 /// Create a minimal edit that will cause the given row to be indented
1809 /// with the given size. After applying this edit, the length of the line
1810 /// will always be at least `new_size.len`.
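    ///
    /// A minimal usage sketch (marked `ignore` since it is illustrative rather than a
    /// compiled doctest; `IndentSize::spaces` is the helper used elsewhere in this crate):
    ///
    /// ```ignore
    /// // Grow the indentation of row 2 from two spaces to four: the returned
    /// // edit inserts the two missing spaces at the start of that line.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     2,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert!(edit.is_some());
    /// ```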
1811 pub fn edit_for_indent_size_adjustment(
1812 row: u32,
1813 current_size: IndentSize,
1814 new_size: IndentSize,
1815 ) -> Option<(Range<Point>, String)> {
1816 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1818 Ordering::Greater => {
1819 let point = Point::new(row, 0);
1820 Some((
1821 point..point,
1822 iter::repeat(new_size.char())
1823 .take((new_size.len - current_size.len) as usize)
1824 .collect::<String>(),
1825 ))
1826 }
1827
1828 Ordering::Less => Some((
1829 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1830 String::new(),
1831 )),
1832
1833 Ordering::Equal => None,
1834 }
1835 } else {
1836 Some((
1837 Point::new(row, 0)..Point::new(row, current_size.len),
1838 iter::repeat(new_size.char())
1839 .take(new_size.len as usize)
1840 .collect::<String>(),
1841 ))
1842 }
1843 }
1844
1845 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1846 /// and the given new text.
1847 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1848 let old_text = self.as_rope().clone();
1849 let base_version = self.version();
1850 cx.background_executor()
1851 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1852 let old_text = old_text.to_string();
1853 let line_ending = LineEnding::detect(&new_text);
1854 LineEnding::normalize(&mut new_text);
1855 let edits = text_diff(&old_text, &new_text);
1856 Diff {
1857 base_version,
1858 line_ending,
1859 edits,
1860 }
1861 })
1862 }
1863
1864 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1866 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1867 let old_text = self.as_rope().clone();
1868 let line_ending = self.line_ending();
1869 let base_version = self.version();
1870 cx.background_spawn(async move {
1871 let ranges = trailing_whitespace_ranges(&old_text);
1872 let empty = Arc::<str>::from("");
1873 Diff {
1874 base_version,
1875 line_ending,
1876 edits: ranges
1877 .into_iter()
1878 .map(|range| (range, empty.clone()))
1879 .collect(),
1880 }
1881 })
1882 }
1883
1884 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Has no effect if the buffer is empty.
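    ///
    /// A quick sketch (marked `ignore`; assumes a `cx: &mut Context<Buffer>` in scope):
    ///
    /// ```ignore
    /// // A buffer containing "fn main() {}  \n\n" ends up as "fn main() {}\n".
    /// buffer.ensure_final_newline(cx);
    /// ```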
1886 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1887 let len = self.len();
1888 if len == 0 {
1889 return;
1890 }
1891 let mut offset = len;
1892 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1893 let non_whitespace_len = chunk
1894 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1895 .len();
1896 offset -= chunk.len();
1897 offset += non_whitespace_len;
1898 if non_whitespace_len != 0 {
1899 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1900 return;
1901 }
1902 break;
1903 }
1904 }
1905 self.edit([(offset..len, "\n")], None, cx);
1906 }
1907
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
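    ///
    /// A hedged sketch of the intended flow (marked `ignore`; assumes a GPUI entity
    /// handle `buffer: Entity<Buffer>` and an async context, with error handling omitted):
    ///
    /// ```ignore
    /// // Compute the diff on a background task, then apply it to the buffer.
    /// // Hunks that conflict with edits made in the meantime are dropped.
    /// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```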
1911 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1912 let snapshot = self.snapshot();
1913 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1914 let mut delta = 0;
1915 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1916 while let Some(edit_since) = edits_since.peek() {
1917 // If the edit occurs after a diff hunk, then it does not
1918 // affect that hunk.
1919 if edit_since.old.start > range.end {
1920 break;
1921 }
1922 // If the edit precedes the diff hunk, then adjust the hunk
1923 // to reflect the edit.
1924 else if edit_since.old.end < range.start {
1925 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1926 edits_since.next();
1927 }
1928 // If the edit intersects a diff hunk, then discard that hunk.
1929 else {
1930 return None;
1931 }
1932 }
1933
1934 let start = (range.start as i64 + delta) as usize;
1935 let end = (range.end as i64 + delta) as usize;
1936 Some((start..end, new_text))
1937 });
1938
1939 self.start_transaction();
1940 self.text.set_line_ending(diff.line_ending);
1941 self.edit(adjusted_edits, None, cx);
1942 self.end_transaction(cx)
1943 }
1944
1945 fn has_unsaved_edits(&self) -> bool {
1946 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1947
1948 if last_version == self.version {
1949 self.has_unsaved_edits
1950 .set((last_version, has_unsaved_edits));
1951 return has_unsaved_edits;
1952 }
1953
1954 let has_edits = self.has_edits_since(&self.saved_version);
1955 self.has_unsaved_edits
1956 .set((self.version.clone(), has_edits));
1957 has_edits
1958 }
1959
1960 /// Checks if the buffer has unsaved changes.
1961 pub fn is_dirty(&self) -> bool {
1962 if self.capability == Capability::ReadOnly {
1963 return false;
1964 }
1965 if self.has_conflict {
1966 return true;
1967 }
1968 match self.file.as_ref().map(|f| f.disk_state()) {
1969 Some(DiskState::New) | Some(DiskState::Deleted) => {
1970 !self.is_empty() && self.has_unsaved_edits()
1971 }
1972 _ => self.has_unsaved_edits(),
1973 }
1974 }
1975
1976 /// Checks if the buffer and its file have both changed since the buffer
1977 /// was last saved or reloaded.
1978 pub fn has_conflict(&self) -> bool {
1979 if self.has_conflict {
1980 return true;
1981 }
1982 let Some(file) = self.file.as_ref() else {
1983 return false;
1984 };
1985 match file.disk_state() {
1986 DiskState::New => false,
1987 DiskState::Present { mtime } => match self.saved_mtime {
1988 Some(saved_mtime) => {
1989 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1990 }
1991 None => true,
1992 },
1993 DiskState::Deleted => false,
1994 }
1995 }
1996
1997 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1998 pub fn subscribe(&mut self) -> Subscription {
1999 self.text.subscribe()
2000 }
2001
2002 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2003 ///
2004 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
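    ///
    /// A small usage sketch (marked `ignore`; assumes `buffer` is being edited with a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&bit));
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// // The bit is flipped synchronously as part of the edit.
    /// assert!(bit.get());
    /// ```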
2006 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2007 if let Err(ix) = self
2008 .change_bits
2009 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2010 {
2011 self.change_bits.insert(ix, bit);
2012 }
2013 }
2014
2015 fn was_changed(&mut self) {
2016 self.change_bits.retain(|change_bit| {
2017 change_bit.upgrade().map_or(false, |bit| {
2018 bit.replace(true);
2019 true
2020 })
2021 });
2022 }
2023
2024 /// Starts a transaction, if one is not already in-progress. When undoing or
2025 /// redoing edits, all of the edits performed within a transaction are undone
2026 /// or redone together.
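    ///
    /// A brief sketch of grouping two edits into a single undo step (marked `ignore`;
    /// `cx` is an assumed `&mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {}")], None, cx);
    /// buffer.edit([(12..12, "\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```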
2027 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2028 self.start_transaction_at(Instant::now())
2029 }
2030
2031 /// Starts a transaction, providing the current time. Subsequent transactions
2032 /// that occur within a short period of time will be grouped together. This
2033 /// is controlled by the buffer's undo grouping duration.
2034 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2035 self.transaction_depth += 1;
2036 if self.was_dirty_before_starting_transaction.is_none() {
2037 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2038 }
2039 self.text.start_transaction_at(now)
2040 }
2041
2042 /// Terminates the current transaction, if this is the outermost transaction.
2043 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2044 self.end_transaction_at(Instant::now(), cx)
2045 }
2046
2047 /// Terminates the current transaction, providing the current time. Subsequent transactions
2048 /// that occur within a short period of time will be grouped together. This
2049 /// is controlled by the buffer's undo grouping duration.
2050 pub fn end_transaction_at(
2051 &mut self,
2052 now: Instant,
2053 cx: &mut Context<Self>,
2054 ) -> Option<TransactionId> {
2055 assert!(self.transaction_depth > 0);
2056 self.transaction_depth -= 1;
2057 let was_dirty = if self.transaction_depth == 0 {
2058 self.was_dirty_before_starting_transaction.take().unwrap()
2059 } else {
2060 false
2061 };
2062 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2063 self.did_edit(&start_version, was_dirty, cx);
2064 Some(transaction_id)
2065 } else {
2066 None
2067 }
2068 }
2069
2070 /// Manually add a transaction to the buffer's undo history.
2071 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2072 self.text.push_transaction(transaction, now);
2073 }
2074
2075 /// Differs from `push_transaction` in that it does not clear the redo
2076 /// stack. Intended to be used to create a parent transaction to merge
2077 /// potential child transactions into.
2078 ///
2079 /// The caller is responsible for removing it from the undo history using
2080 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2081 /// are merged into this transaction, the caller is responsible for ensuring
2082 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2083 /// cleared is to create transactions with the usual `start_transaction` and
2084 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
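    ///
    /// A hedged sketch of that pattern (marked `ignore`; `cx` is an assumed
    /// `&mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```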
2086 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2087 self.text.push_empty_transaction(now)
2088 }
2089
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2092 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2093 self.text.finalize_last_transaction()
2094 }
2095
2096 /// Manually group all changes since a given transaction.
2097 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2098 self.text.group_until_transaction(transaction_id);
2099 }
2100
2101 /// Manually remove a transaction from the buffer's undo history
2102 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2103 self.text.forget_transaction(transaction_id)
2104 }
2105
2106 /// Retrieve a transaction from the buffer's undo history
2107 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2108 self.text.get_transaction(transaction_id)
2109 }
2110
2111 /// Manually merge two transactions in the buffer's undo history.
2112 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2113 self.text.merge_transactions(transaction, destination);
2114 }
2115
2116 /// Waits for the buffer to receive operations with the given timestamps.
2117 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2118 &mut self,
2119 edit_ids: It,
2120 ) -> impl Future<Output = Result<()>> + use<It> {
2121 self.text.wait_for_edits(edit_ids)
2122 }
2123
2124 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2125 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2126 &mut self,
2127 anchors: It,
2128 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2129 self.text.wait_for_anchors(anchors)
2130 }
2131
2132 /// Waits for the buffer to receive operations up to the given version.
2133 pub fn wait_for_version(
2134 &mut self,
2135 version: clock::Global,
2136 ) -> impl Future<Output = Result<()>> + use<> {
2137 self.text.wait_for_version(version)
2138 }
2139
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2142 pub fn give_up_waiting(&mut self) {
2143 self.text.give_up_waiting();
2144 }
2145
2146 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2147 let mut rx = None;
2148 if !self.autoindent_requests.is_empty() {
2149 let channel = oneshot::channel();
2150 self.wait_for_autoindent_txs.push(channel.0);
2151 rx = Some(channel.1);
2152 }
2153 rx
2154 }
2155
2156 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2157 pub fn set_active_selections(
2158 &mut self,
2159 selections: Arc<[Selection<Anchor>]>,
2160 line_mode: bool,
2161 cursor_shape: CursorShape,
2162 cx: &mut Context<Self>,
2163 ) {
2164 let lamport_timestamp = self.text.lamport_clock.tick();
2165 self.remote_selections.insert(
2166 self.text.replica_id(),
2167 SelectionSet {
2168 selections: selections.clone(),
2169 lamport_timestamp,
2170 line_mode,
2171 cursor_shape,
2172 },
2173 );
2174 self.send_operation(
2175 Operation::UpdateSelections {
2176 selections,
2177 line_mode,
2178 lamport_timestamp,
2179 cursor_shape,
2180 },
2181 true,
2182 cx,
2183 );
2184 self.non_text_state_update_count += 1;
2185 cx.notify();
2186 }
2187
2188 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2189 /// this replica.
2190 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2191 if self
2192 .remote_selections
2193 .get(&self.text.replica_id())
2194 .map_or(true, |set| !set.selections.is_empty())
2195 {
2196 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2197 }
2198 }
2199
2200 pub fn set_agent_selections(
2201 &mut self,
2202 selections: Arc<[Selection<Anchor>]>,
2203 line_mode: bool,
2204 cursor_shape: CursorShape,
2205 cx: &mut Context<Self>,
2206 ) {
2207 let lamport_timestamp = self.text.lamport_clock.tick();
2208 self.remote_selections.insert(
2209 AGENT_REPLICA_ID,
2210 SelectionSet {
2211 selections: selections.clone(),
2212 lamport_timestamp,
2213 line_mode,
2214 cursor_shape,
2215 },
2216 );
2217 self.non_text_state_update_count += 1;
2218 cx.notify();
2219 }
2220
2221 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2222 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2223 }
2224
2225 /// Replaces the buffer's entire text.
2226 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2227 where
2228 T: Into<Arc<str>>,
2229 {
2230 self.autoindent_requests.clear();
2231 self.edit([(0..self.len(), text)], None, cx)
2232 }
2233
2234 /// Appends the given text to the end of the buffer.
2235 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2236 where
2237 T: Into<Arc<str>>,
2238 {
2239 self.edit([(self.len()..self.len(), text)], None, cx)
2240 }
2241
2242 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2243 /// delete, and a string of text to insert at that location.
2244 ///
2245 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2246 /// request for the edited ranges, which will be processed when the buffer finishes
2247 /// parsing.
2248 ///
2249 /// Parsing takes place at the end of a transaction, and may compute synchronously
2250 /// or asynchronously, depending on the changes.
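    ///
    /// A short usage sketch (marked `ignore`; `position` and `cx` are assumed to be an
    /// offset into the buffer and a `&mut Context<Buffer>`, respectively):
    ///
    /// ```ignore
    /// // Insert a new line inside an existing block and let the language's
    /// // indentation rules choose its indentation.
    /// buffer.edit(
    ///     [(position..position, "\nlet x = 1;")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```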
2251 pub fn edit<I, S, T>(
2252 &mut self,
2253 edits_iter: I,
2254 autoindent_mode: Option<AutoindentMode>,
2255 cx: &mut Context<Self>,
2256 ) -> Option<clock::Lamport>
2257 where
2258 I: IntoIterator<Item = (Range<S>, T)>,
2259 S: ToOffset,
2260 T: Into<Arc<str>>,
2261 {
2262 // Skip invalid edits and coalesce contiguous ones.
2263 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2264
2265 for (range, new_text) in edits_iter {
2266 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2267
2268 if range.start > range.end {
2269 mem::swap(&mut range.start, &mut range.end);
2270 }
2271 let new_text = new_text.into();
2272 if !new_text.is_empty() || !range.is_empty() {
2273 if let Some((prev_range, prev_text)) = edits.last_mut() {
2274 if prev_range.end >= range.start {
2275 prev_range.end = cmp::max(prev_range.end, range.end);
2276 *prev_text = format!("{prev_text}{new_text}").into();
2277 } else {
2278 edits.push((range, new_text));
2279 }
2280 } else {
2281 edits.push((range, new_text));
2282 }
2283 }
2284 }
2285 if edits.is_empty() {
2286 return None;
2287 }
2288
2289 self.start_transaction();
2290 self.pending_autoindent.take();
2291 let autoindent_request = autoindent_mode
2292 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2293
2294 let edit_operation = self.text.edit(edits.iter().cloned());
2295 let edit_id = edit_operation.timestamp();
2296
2297 if let Some((before_edit, mode)) = autoindent_request {
2298 let mut delta = 0isize;
2299 let entries = edits
2300 .into_iter()
2301 .enumerate()
2302 .zip(&edit_operation.as_edit().unwrap().new_text)
2303 .map(|((ix, (range, _)), new_text)| {
2304 let new_text_length = new_text.len();
2305 let old_start = range.start.to_point(&before_edit);
2306 let new_start = (delta + range.start as isize) as usize;
2307 let range_len = range.end - range.start;
2308 delta += new_text_length as isize - range_len as isize;
2309
2310 // Decide what range of the insertion to auto-indent, and whether
2311 // the first line of the insertion should be considered a newly-inserted line
2312 // or an edit to an existing line.
2313 let mut range_of_insertion_to_indent = 0..new_text_length;
2314 let mut first_line_is_new = true;
2315
2316 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2317 let old_line_end = before_edit.line_len(old_start.row);
2318
2319 if old_start.column > old_line_start {
2320 first_line_is_new = false;
2321 }
2322
2323 if !new_text.contains('\n')
2324 && (old_start.column + (range_len as u32) < old_line_end
2325 || old_line_end == old_line_start)
2326 {
2327 first_line_is_new = false;
2328 }
2329
2330 // When inserting text starting with a newline, avoid auto-indenting the
2331 // previous line.
2332 if new_text.starts_with('\n') {
2333 range_of_insertion_to_indent.start += 1;
2334 first_line_is_new = true;
2335 }
2336
2337 let mut original_indent_column = None;
2338 if let AutoindentMode::Block {
2339 original_indent_columns,
2340 } = &mode
2341 {
2342 original_indent_column = Some(if new_text.starts_with('\n') {
2343 indent_size_for_text(
2344 new_text[range_of_insertion_to_indent.clone()].chars(),
2345 )
2346 .len
2347 } else {
2348 original_indent_columns
2349 .get(ix)
2350 .copied()
2351 .flatten()
2352 .unwrap_or_else(|| {
2353 indent_size_for_text(
2354 new_text[range_of_insertion_to_indent.clone()].chars(),
2355 )
2356 .len
2357 })
2358 });
2359
2360 // Avoid auto-indenting the line after the edit.
2361 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2362 range_of_insertion_to_indent.end -= 1;
2363 }
2364 }
2365
2366 AutoindentRequestEntry {
2367 first_line_is_new,
2368 original_indent_column,
2369 indent_size: before_edit.language_indent_size_at(range.start, cx),
2370 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2371 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2372 }
2373 })
2374 .collect();
2375
2376 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2377 before_edit,
2378 entries,
2379 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2380 ignore_empty_lines: false,
2381 }));
2382 }
2383
2384 self.end_transaction(cx);
2385 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2386 Some(edit_id)
2387 }
2388
2389 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2390 self.was_changed();
2391
2392 if self.edits_since::<usize>(old_version).next().is_none() {
2393 return;
2394 }
2395
2396 self.reparse(cx);
2397 cx.emit(BufferEvent::Edited);
2398 if was_dirty != self.is_dirty() {
2399 cx.emit(BufferEvent::DirtyChanged);
2400 }
2401 cx.notify();
2402 }
2403
2404 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2405 where
2406 I: IntoIterator<Item = Range<T>>,
2407 T: ToOffset + Copy,
2408 {
2409 let before_edit = self.snapshot();
2410 let entries = ranges
2411 .into_iter()
2412 .map(|range| AutoindentRequestEntry {
2413 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2414 first_line_is_new: true,
2415 indent_size: before_edit.language_indent_size_at(range.start, cx),
2416 original_indent_column: None,
2417 })
2418 .collect();
2419 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2420 before_edit,
2421 entries,
2422 is_block_mode: false,
2423 ignore_empty_lines: true,
2424 }));
2425 self.request_autoindent(cx);
2426 }
2427
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2430 pub fn insert_empty_line(
2431 &mut self,
2432 position: impl ToPoint,
2433 space_above: bool,
2434 space_below: bool,
2435 cx: &mut Context<Self>,
2436 ) -> Point {
2437 let mut position = position.to_point(self);
2438
2439 self.start_transaction();
2440
2441 self.edit(
2442 [(position..position, "\n")],
2443 Some(AutoindentMode::EachLine),
2444 cx,
2445 );
2446
2447 if position.column > 0 {
2448 position += Point::new(1, 0);
2449 }
2450
2451 if !self.is_line_blank(position.row) {
2452 self.edit(
2453 [(position..position, "\n")],
2454 Some(AutoindentMode::EachLine),
2455 cx,
2456 );
2457 }
2458
2459 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2460 self.edit(
2461 [(position..position, "\n")],
2462 Some(AutoindentMode::EachLine),
2463 cx,
2464 );
2465 position.row += 1;
2466 }
2467
2468 if space_below
2469 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2470 {
2471 self.edit(
2472 [(position..position, "\n")],
2473 Some(AutoindentMode::EachLine),
2474 cx,
2475 );
2476 }
2477
2478 self.end_transaction(cx);
2479
2480 position
2481 }
2482
2483 /// Applies the given remote operations to the buffer.
2484 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2485 self.pending_autoindent.take();
2486 let was_dirty = self.is_dirty();
2487 let old_version = self.version.clone();
2488 let mut deferred_ops = Vec::new();
2489 let buffer_ops = ops
2490 .into_iter()
2491 .filter_map(|op| match op {
2492 Operation::Buffer(op) => Some(op),
2493 _ => {
2494 if self.can_apply_op(&op) {
2495 self.apply_op(op, cx);
2496 } else {
2497 deferred_ops.push(op);
2498 }
2499 None
2500 }
2501 })
2502 .collect::<Vec<_>>();
2503 for operation in buffer_ops.iter() {
2504 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2505 }
2506 self.text.apply_ops(buffer_ops);
2507 self.deferred_ops.insert(deferred_ops);
2508 self.flush_deferred_ops(cx);
2509 self.did_edit(&old_version, was_dirty, cx);
2510 // Notify independently of whether the buffer was edited as the operations could include a
2511 // selection update.
2512 cx.notify();
2513 }
2514
2515 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2516 let mut deferred_ops = Vec::new();
2517 for op in self.deferred_ops.drain().iter().cloned() {
2518 if self.can_apply_op(&op) {
2519 self.apply_op(op, cx);
2520 } else {
2521 deferred_ops.push(op);
2522 }
2523 }
2524 self.deferred_ops.insert(deferred_ops);
2525 }
2526
2527 pub fn has_deferred_ops(&self) -> bool {
2528 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2529 }
2530
2531 fn can_apply_op(&self, operation: &Operation) -> bool {
2532 match operation {
2533 Operation::Buffer(_) => {
2534 unreachable!("buffer operations should never be applied at this layer")
2535 }
2536 Operation::UpdateDiagnostics {
2537 diagnostics: diagnostic_set,
2538 ..
2539 } => diagnostic_set.iter().all(|diagnostic| {
2540 self.text.can_resolve(&diagnostic.range.start)
2541 && self.text.can_resolve(&diagnostic.range.end)
2542 }),
2543 Operation::UpdateSelections { selections, .. } => selections
2544 .iter()
2545 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2546 Operation::UpdateCompletionTriggers { .. } => true,
2547 }
2548 }
2549
2550 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2551 match operation {
2552 Operation::Buffer(_) => {
2553 unreachable!("buffer operations should never be applied at this layer")
2554 }
2555 Operation::UpdateDiagnostics {
2556 server_id,
2557 diagnostics: diagnostic_set,
2558 lamport_timestamp,
2559 } => {
2560 let snapshot = self.snapshot();
2561 self.apply_diagnostic_update(
2562 server_id,
2563 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2564 lamport_timestamp,
2565 cx,
2566 );
2567 }
2568 Operation::UpdateSelections {
2569 selections,
2570 lamport_timestamp,
2571 line_mode,
2572 cursor_shape,
2573 } => {
2574 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2575 if set.lamport_timestamp > lamport_timestamp {
2576 return;
2577 }
2578 }
2579
2580 self.remote_selections.insert(
2581 lamport_timestamp.replica_id,
2582 SelectionSet {
2583 selections,
2584 lamport_timestamp,
2585 line_mode,
2586 cursor_shape,
2587 },
2588 );
2589 self.text.lamport_clock.observe(lamport_timestamp);
2590 self.non_text_state_update_count += 1;
2591 }
2592 Operation::UpdateCompletionTriggers {
2593 triggers,
2594 lamport_timestamp,
2595 server_id,
2596 } => {
2597 if triggers.is_empty() {
2598 self.completion_triggers_per_language_server
2599 .remove(&server_id);
2600 self.completion_triggers = self
2601 .completion_triggers_per_language_server
2602 .values()
2603 .flat_map(|triggers| triggers.into_iter().cloned())
2604 .collect();
2605 } else {
2606 self.completion_triggers_per_language_server
2607 .insert(server_id, triggers.iter().cloned().collect());
2608 self.completion_triggers.extend(triggers);
2609 }
2610 self.text.lamport_clock.observe(lamport_timestamp);
2611 }
2612 }
2613 }
2614
2615 fn apply_diagnostic_update(
2616 &mut self,
2617 server_id: LanguageServerId,
2618 diagnostics: DiagnosticSet,
2619 lamport_timestamp: clock::Lamport,
2620 cx: &mut Context<Self>,
2621 ) {
2622 if lamport_timestamp > self.diagnostics_timestamp {
2623 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2624 if diagnostics.is_empty() {
2625 if let Ok(ix) = ix {
2626 self.diagnostics.remove(ix);
2627 }
2628 } else {
2629 match ix {
2630 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2631 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2632 };
2633 }
2634 self.diagnostics_timestamp = lamport_timestamp;
2635 self.non_text_state_update_count += 1;
2636 self.text.lamport_clock.observe(lamport_timestamp);
2637 cx.notify();
2638 cx.emit(BufferEvent::DiagnosticsUpdated);
2639 }
2640 }
2641
2642 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2643 self.was_changed();
2644 cx.emit(BufferEvent::Operation {
2645 operation,
2646 is_local,
2647 });
2648 }
2649
2650 /// Removes the selections for a given peer.
2651 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2652 self.remote_selections.remove(&replica_id);
2653 cx.notify();
2654 }
2655
2656 /// Undoes the most recent transaction.
2657 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2658 let was_dirty = self.is_dirty();
2659 let old_version = self.version.clone();
2660
2661 if let Some((transaction_id, operation)) = self.text.undo() {
2662 self.send_operation(Operation::Buffer(operation), true, cx);
2663 self.did_edit(&old_version, was_dirty, cx);
2664 Some(transaction_id)
2665 } else {
2666 None
2667 }
2668 }
2669
2670 /// Manually undoes a specific transaction in the buffer's undo history.
2671 pub fn undo_transaction(
2672 &mut self,
2673 transaction_id: TransactionId,
2674 cx: &mut Context<Self>,
2675 ) -> bool {
2676 let was_dirty = self.is_dirty();
2677 let old_version = self.version.clone();
2678 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2679 self.send_operation(Operation::Buffer(operation), true, cx);
2680 self.did_edit(&old_version, was_dirty, cx);
2681 true
2682 } else {
2683 false
2684 }
2685 }
2686
2687 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2688 pub fn undo_to_transaction(
2689 &mut self,
2690 transaction_id: TransactionId,
2691 cx: &mut Context<Self>,
2692 ) -> bool {
2693 let was_dirty = self.is_dirty();
2694 let old_version = self.version.clone();
2695
2696 let operations = self.text.undo_to_transaction(transaction_id);
2697 let undone = !operations.is_empty();
2698 for operation in operations {
2699 self.send_operation(Operation::Buffer(operation), true, cx);
2700 }
2701 if undone {
2702 self.did_edit(&old_version, was_dirty, cx)
2703 }
2704 undone
2705 }
2706
2707 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2708 let was_dirty = self.is_dirty();
2709 let operation = self.text.undo_operations(counts);
2710 let old_version = self.version.clone();
2711 self.send_operation(Operation::Buffer(operation), true, cx);
2712 self.did_edit(&old_version, was_dirty, cx);
2713 }
2714
    /// Redoes the most recently undone transaction.
2716 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2717 let was_dirty = self.is_dirty();
2718 let old_version = self.version.clone();
2719
2720 if let Some((transaction_id, operation)) = self.text.redo() {
2721 self.send_operation(Operation::Buffer(operation), true, cx);
2722 self.did_edit(&old_version, was_dirty, cx);
2723 Some(transaction_id)
2724 } else {
2725 None
2726 }
2727 }
2728
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2730 pub fn redo_to_transaction(
2731 &mut self,
2732 transaction_id: TransactionId,
2733 cx: &mut Context<Self>,
2734 ) -> bool {
2735 let was_dirty = self.is_dirty();
2736 let old_version = self.version.clone();
2737
2738 let operations = self.text.redo_to_transaction(transaction_id);
2739 let redone = !operations.is_empty();
2740 for operation in operations {
2741 self.send_operation(Operation::Buffer(operation), true, cx);
2742 }
2743 if redone {
2744 self.did_edit(&old_version, was_dirty, cx)
2745 }
2746 redone
2747 }
2748
    /// Override the completion triggers for the given language server with the user-provided ones.
2750 pub fn set_completion_triggers(
2751 &mut self,
2752 server_id: LanguageServerId,
2753 triggers: BTreeSet<String>,
2754 cx: &mut Context<Self>,
2755 ) {
2756 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2757 if triggers.is_empty() {
2758 self.completion_triggers_per_language_server
2759 .remove(&server_id);
2760 self.completion_triggers = self
2761 .completion_triggers_per_language_server
2762 .values()
2763 .flat_map(|triggers| triggers.into_iter().cloned())
2764 .collect();
2765 } else {
2766 self.completion_triggers_per_language_server
2767 .insert(server_id, triggers.clone());
2768 self.completion_triggers.extend(triggers.iter().cloned());
2769 }
2770 self.send_operation(
2771 Operation::UpdateCompletionTriggers {
2772 triggers: triggers.into_iter().collect(),
2773 lamport_timestamp: self.completion_triggers_timestamp,
2774 server_id,
2775 },
2776 true,
2777 cx,
2778 );
2779 cx.notify();
2780 }
2781
2782 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2784 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2785 &self.completion_triggers
2786 }
2787
2788 /// Call this directly after performing edits to prevent the preview tab
2789 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2790 /// to return false until there are additional edits.
2791 pub fn refresh_preview(&mut self) {
2792 self.preview_version = self.version.clone();
2793 }
2794
2795 /// Whether we should preserve the preview status of a tab containing this buffer.
2796 pub fn preserve_preview(&self) -> bool {
2797 !self.has_edits_since(&self.preview_version)
2798 }
2799}
2800
2801#[doc(hidden)]
2802#[cfg(any(test, feature = "test-support"))]
2803impl Buffer {
2804 pub fn edit_via_marked_text(
2805 &mut self,
2806 marked_string: &str,
2807 autoindent_mode: Option<AutoindentMode>,
2808 cx: &mut Context<Self>,
2809 ) {
2810 let edits = self.edits_for_marked_text(marked_string);
2811 self.edit(edits, autoindent_mode, cx);
2812 }
2813
2814 pub fn set_group_interval(&mut self, group_interval: Duration) {
2815 self.text.set_group_interval(group_interval);
2816 }
2817
2818 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2819 where
2820 T: rand::Rng,
2821 {
2822 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2823 let mut last_end = None;
2824 for _ in 0..old_range_count {
2825 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2826 break;
2827 }
2828
2829 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2830 let mut range = self.random_byte_range(new_start, rng);
2831 if rng.gen_bool(0.2) {
2832 mem::swap(&mut range.start, &mut range.end);
2833 }
2834 last_end = Some(range.end);
2835
2836 let new_text_len = rng.gen_range(0..10);
2837 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2838 new_text = new_text.to_uppercase();
2839
2840 edits.push((range, new_text));
2841 }
2842 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2843 self.edit(edits, None, cx);
2844 }
2845
2846 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2847 let was_dirty = self.is_dirty();
2848 let old_version = self.version.clone();
2849
2850 let ops = self.text.randomly_undo_redo(rng);
2851 if !ops.is_empty() {
2852 for op in ops {
2853 self.send_operation(Operation::Buffer(op), true, cx);
2854 self.did_edit(&old_version, was_dirty, cx);
2855 }
2856 }
2857 }
2858}
2859
2860impl EventEmitter<BufferEvent> for Buffer {}
2861
2862impl Deref for Buffer {
2863 type Target = TextBuffer;
2864
2865 fn deref(&self) -> &Self::Target {
2866 &self.text
2867 }
2868}
2869
2870impl BufferSnapshot {
2871 /// Returns [`IndentSize`] for a given line that respects user settings and
2872 /// language preferences.
2873 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2874 indent_size_for_line(self, row)
2875 }
2876
2877 /// Returns [`IndentSize`] for a given position that respects user settings
2878 /// and language preferences.
2879 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2880 let settings = language_settings(
2881 self.language_at(position).map(|l| l.name()),
2882 self.file(),
2883 cx,
2884 );
2885 if settings.hard_tabs {
2886 IndentSize::tab()
2887 } else {
2888 IndentSize::spaces(settings.tab_size.get())
2889 }
2890 }
2891
2892 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2893 /// is passed in as `single_indent_size`.
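    ///
    /// A quick sketch (marked `ignore`; `snapshot` is an assumed `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Suggest indentation for the first five rows, using four spaces per level.
    /// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```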
2894 pub fn suggested_indents(
2895 &self,
2896 rows: impl Iterator<Item = u32>,
2897 single_indent_size: IndentSize,
2898 ) -> BTreeMap<u32, IndentSize> {
2899 let mut result = BTreeMap::new();
2900
2901 for row_range in contiguous_ranges(rows, 10) {
2902 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2903 Some(suggestions) => suggestions,
2904 _ => break,
2905 };
2906
2907 for (row, suggestion) in row_range.zip(suggestions) {
2908 let indent_size = if let Some(suggestion) = suggestion {
2909 result
2910 .get(&suggestion.basis_row)
2911 .copied()
2912 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2913 .with_delta(suggestion.delta, single_indent_size)
2914 } else {
2915 self.indent_size_for_line(row)
2916 };
2917
2918 result.insert(row, indent_size);
2919 }
2920 }
2921
2922 result
2923 }
2924
2925 fn suggest_autoindents(
2926 &self,
2927 row_range: Range<u32>,
2928 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2929 let config = &self.language.as_ref()?.config;
2930 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2931
2932 #[derive(Debug, Clone)]
2933 struct StartPosition {
2934 start: Point,
2935 suffix: SharedString,
2936 }
2937
2938 // Find the suggested indentation ranges based on the syntax tree.
2939 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2940 let end = Point::new(row_range.end, 0);
2941 let range = (start..end).to_offset(&self.text);
2942 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2943 Some(&grammar.indents_config.as_ref()?.query)
2944 });
2945 let indent_configs = matches
2946 .grammars()
2947 .iter()
2948 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2949 .collect::<Vec<_>>();
2950
2951 let mut indent_ranges = Vec::<Range<Point>>::new();
2952 let mut start_positions = Vec::<StartPosition>::new();
2953 let mut outdent_positions = Vec::<Point>::new();
2954 while let Some(mat) = matches.peek() {
2955 let mut start: Option<Point> = None;
2956 let mut end: Option<Point> = None;
2957
2958 let config = indent_configs[mat.grammar_index];
2959 for capture in mat.captures {
2960 if capture.index == config.indent_capture_ix {
2961 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2962 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2963 } else if Some(capture.index) == config.start_capture_ix {
2964 start = Some(Point::from_ts_point(capture.node.end_position()));
2965 } else if Some(capture.index) == config.end_capture_ix {
2966 end = Some(Point::from_ts_point(capture.node.start_position()));
2967 } else if Some(capture.index) == config.outdent_capture_ix {
2968 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2969 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2970 start_positions.push(StartPosition {
2971 start: Point::from_ts_point(capture.node.start_position()),
2972 suffix: suffix.clone(),
2973 });
2974 }
2975 }
2976
2977 matches.advance();
2978 if let Some((start, end)) = start.zip(end) {
2979 if start.row == end.row {
2980 continue;
2981 }
2982 let range = start..end;
2983 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2984 Err(ix) => indent_ranges.insert(ix, range),
2985 Ok(ix) => {
2986 let prev_range = &mut indent_ranges[ix];
2987 prev_range.end = prev_range.end.max(range.end);
2988 }
2989 }
2990 }
2991 }
2992
2993 let mut error_ranges = Vec::<Range<Point>>::new();
2994 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2995 grammar.error_query.as_ref()
2996 });
2997 while let Some(mat) = matches.peek() {
2998 let node = mat.captures[0].node;
2999 let start = Point::from_ts_point(node.start_position());
3000 let end = Point::from_ts_point(node.end_position());
3001 let range = start..end;
3002 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3003 Ok(ix) | Err(ix) => ix,
3004 };
3005 let mut end_ix = ix;
3006 while let Some(existing_range) = error_ranges.get(end_ix) {
3007 if existing_range.end < end {
3008 end_ix += 1;
3009 } else {
3010 break;
3011 }
3012 }
3013 error_ranges.splice(ix..end_ix, [range]);
3014 matches.advance();
3015 }
3016
3017 outdent_positions.sort();
3018 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and truncate that range's end to the outdent position.
3021 if let Some(range_to_truncate) = indent_ranges
3022 .iter_mut()
3023 .filter(|indent_range| indent_range.contains(&outdent_position))
3024 .next_back()
3025 {
3026 range_to_truncate.end = outdent_position;
3027 }
3028 }
3029
3030 start_positions.sort_by_key(|b| b.start);
3031
        // Find the suggested indentation increases and decreases based on regexes.
3033 let mut regex_outdent_map = HashMap::default();
3034 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3035 let mut start_positions_iter = start_positions.iter().peekable();
3036
3037 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3038 self.for_each_line(
3039 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3040 ..Point::new(row_range.end, 0),
3041 |row, line| {
3042 if config
3043 .decrease_indent_pattern
3044 .as_ref()
3045 .map_or(false, |regex| regex.is_match(line))
3046 {
3047 indent_change_rows.push((row, Ordering::Less));
3048 }
3049 if config
3050 .increase_indent_pattern
3051 .as_ref()
3052 .map_or(false, |regex| regex.is_match(line))
3053 {
3054 indent_change_rows.push((row + 1, Ordering::Greater));
3055 }
3056 while let Some(pos) = start_positions_iter.peek() {
3057 if pos.start.row < row {
3058 let pos = start_positions_iter.next().unwrap();
3059 last_seen_suffix
3060 .entry(pos.suffix.to_string())
3061 .or_default()
3062 .push(pos.start);
3063 } else {
3064 break;
3065 }
3066 }
3067 for rule in &config.decrease_indent_patterns {
3068 if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) {
3069 let row_start_column = self.indent_size_for_line(row).len;
3070 let basis_row = rule
3071 .valid_after
3072 .iter()
3073 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3074 .flatten()
3075 .filter(|start_point| start_point.column <= row_start_column)
3076 .max_by_key(|start_point| start_point.row);
3077 if let Some(outdent_to_row) = basis_row {
3078 regex_outdent_map.insert(row, outdent_to_row.row);
3079 }
3080 break;
3081 }
3082 }
3083 },
3084 );
3085
3086 let mut indent_changes = indent_change_rows.into_iter().peekable();
3087 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3088 prev_non_blank_row.unwrap_or(0)
3089 } else {
3090 row_range.start.saturating_sub(1)
3091 };
3092
3093 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3094 Some(row_range.map(move |row| {
3095 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3096
3097 let mut indent_from_prev_row = false;
3098 let mut outdent_from_prev_row = false;
3099 let mut outdent_to_row = u32::MAX;
3100 let mut from_regex = false;
3101
3102 while let Some((indent_row, delta)) = indent_changes.peek() {
3103 match indent_row.cmp(&row) {
3104 Ordering::Equal => match delta {
3105 Ordering::Less => {
3106 from_regex = true;
3107 outdent_from_prev_row = true
3108 }
3109 Ordering::Greater => {
3110 indent_from_prev_row = true;
3111 from_regex = true
3112 }
3113 _ => {}
3114 },
3115
3116 Ordering::Greater => break,
3117 Ordering::Less => {}
3118 }
3119
3120 indent_changes.next();
3121 }
3122
3123 for range in &indent_ranges {
3124 if range.start.row >= row {
3125 break;
3126 }
3127 if range.start.row == prev_row && range.end > row_start {
3128 indent_from_prev_row = true;
3129 }
3130 if range.end > prev_row_start && range.end <= row_start {
3131 outdent_to_row = outdent_to_row.min(range.start.row);
3132 }
3133 }
3134
3135 if let Some(basis_row) = regex_outdent_map.get(&row) {
3136 indent_from_prev_row = false;
3137 outdent_to_row = *basis_row;
3138 from_regex = true;
3139 }
3140
3141 let within_error = error_ranges
3142 .iter()
3143 .any(|e| e.start.row < row && e.end > row_start);
3144
3145 let suggestion = if outdent_to_row == prev_row
3146 || (outdent_from_prev_row && indent_from_prev_row)
3147 {
3148 Some(IndentSuggestion {
3149 basis_row: prev_row,
3150 delta: Ordering::Equal,
3151 within_error: within_error && !from_regex,
3152 })
3153 } else if indent_from_prev_row {
3154 Some(IndentSuggestion {
3155 basis_row: prev_row,
3156 delta: Ordering::Greater,
3157 within_error: within_error && !from_regex,
3158 })
3159 } else if outdent_to_row < prev_row {
3160 Some(IndentSuggestion {
3161 basis_row: outdent_to_row,
3162 delta: Ordering::Equal,
3163 within_error: within_error && !from_regex,
3164 })
3165 } else if outdent_from_prev_row {
3166 Some(IndentSuggestion {
3167 basis_row: prev_row,
3168 delta: Ordering::Less,
3169 within_error: within_error && !from_regex,
3170 })
3171 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3172 {
3173 Some(IndentSuggestion {
3174 basis_row: prev_row,
3175 delta: Ordering::Equal,
3176 within_error: within_error && !from_regex,
3177 })
3178 } else {
3179 None
3180 };
3181
3182 prev_row = row;
3183 prev_row_start = row_start;
3184 suggestion
3185 }))
3186 }
3187
3188 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3189 while row > 0 {
3190 row -= 1;
3191 if !self.is_line_blank(row) {
3192 return Some(row);
3193 }
3194 }
3195 None
3196 }
3197
3198 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3199 let captures = self.syntax.captures(range, &self.text, |grammar| {
3200 grammar.highlights_query.as_ref()
3201 });
3202 let highlight_maps = captures
3203 .grammars()
3204 .iter()
3205 .map(|grammar| grammar.highlight_map())
3206 .collect();
3207 (captures, highlight_maps)
3208 }
3209
3210 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3211 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3212 /// returned in chunks where each chunk has a single syntax highlighting style and
3213 /// diagnostic status.
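    ///
    /// A small sketch (marked `ignore`; it assumes the chunk items expose a `text` field
    /// and that the snapshot's full text is available via `text()`, both of which are
    /// assumptions about this crate's types rather than guarantees):
    ///
    /// ```ignore
    /// // Reassemble the highlighted text of the whole buffer.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// assert_eq!(text, snapshot.text());
    /// ```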
3214 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3215 let range = range.start.to_offset(self)..range.end.to_offset(self);
3216
3217 let mut syntax = None;
3218 if language_aware {
3219 syntax = Some(self.get_highlights(range.clone()));
3220 }
3221 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3222 let diagnostics = language_aware;
3223 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3224 }
3225
3226 pub fn highlighted_text_for_range<T: ToOffset>(
3227 &self,
3228 range: Range<T>,
3229 override_style: Option<HighlightStyle>,
3230 syntax_theme: &SyntaxTheme,
3231 ) -> HighlightedText {
3232 HighlightedText::from_buffer_range(
3233 range,
3234 &self.text,
3235 &self.syntax,
3236 override_style,
3237 syntax_theme,
3238 )
3239 }
3240
3241 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3243 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3244 let mut line = String::new();
3245 let mut row = range.start.row;
3246 for chunk in self
3247 .as_rope()
3248 .chunks_in_range(range.to_offset(self))
3249 .chain(["\n"])
3250 {
3251 for (newline_ix, text) in chunk.split('\n').enumerate() {
3252 if newline_ix > 0 {
3253 callback(row, &line);
3254 row += 1;
3255 line.clear();
3256 }
3257 line.push_str(text);
3258 }
3259 }
3260 }
3261
3262 /// Iterates over every [`SyntaxLayer`] in the buffer.
3263 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3264 self.syntax
3265 .layers_for_range(0..self.len(), &self.text, true)
3266 }
3267
3268 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3269 let offset = position.to_offset(self);
3270 self.syntax
3271 .layers_for_range(offset..offset, &self.text, false)
3272 .filter(|l| l.node().end_byte() > offset)
3273 .last()
3274 }
3275
3276 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3277 &self,
3278 range: Range<D>,
3279 ) -> Option<SyntaxLayer<'_>> {
3280 let range = range.to_offset(self);
3281 return self
3282 .syntax
3283 .layers_for_range(range, &self.text, false)
3284 .max_by(|a, b| {
3285 if a.depth != b.depth {
3286 a.depth.cmp(&b.depth)
3287 } else if a.offset.0 != b.offset.0 {
3288 a.offset.0.cmp(&b.offset.0)
3289 } else {
3290 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3291 }
3292 });
3293 }
3294
3295 /// Returns the main [`Language`].
3296 pub fn language(&self) -> Option<&Arc<Language>> {
3297 self.language.as_ref()
3298 }
3299
3300 /// Returns the [`Language`] at the given location.
3301 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3302 self.syntax_layer_at(position)
3303 .map(|info| info.language)
3304 .or(self.language.as_ref())
3305 }
3306
3307 /// Returns the settings for the language at the given location.
3308 pub fn settings_at<'a, D: ToOffset>(
3309 &'a self,
3310 position: D,
3311 cx: &'a App,
3312 ) -> Cow<'a, LanguageSettings> {
3313 language_settings(
3314 self.language_at(position).map(|l| l.name()),
3315 self.file.as_ref(),
3316 cx,
3317 )
3318 }
3319
3320 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3321 CharClassifier::new(self.language_scope_at(point))
3322 }
3323
3324 /// Returns the [`LanguageScope`] at the given location.
3325 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3326 let offset = position.to_offset(self);
3327 let mut scope = None;
3328 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3329
3330 // Use the layer that has the smallest node intersecting the given point.
3331 for layer in self
3332 .syntax
3333 .layers_for_range(offset..offset, &self.text, false)
3334 {
3335 let mut cursor = layer.node().walk();
3336
3337 let mut range = None;
3338 loop {
3339 let child_range = cursor.node().byte_range();
3340 if !child_range.contains(&offset) {
3341 break;
3342 }
3343
3344 range = Some(child_range);
3345 if cursor.goto_first_child_for_byte(offset).is_none() {
3346 break;
3347 }
3348 }
3349
3350 if let Some(range) = range {
3351 if smallest_range_and_depth.as_ref().map_or(
3352 true,
3353 |(smallest_range, smallest_range_depth)| {
3354 if layer.depth > *smallest_range_depth {
3355 true
3356 } else if layer.depth == *smallest_range_depth {
3357 range.len() < smallest_range.len()
3358 } else {
3359 false
3360 }
3361 },
3362 ) {
3363 smallest_range_and_depth = Some((range, layer.depth));
3364 scope = Some(LanguageScope {
3365 language: layer.language.clone(),
3366 override_id: layer.override_id(offset, &self.text),
3367 });
3368 }
3369 }
3370 }
3371
3372 scope.or_else(|| {
3373 self.language.clone().map(|language| LanguageScope {
3374 language,
3375 override_id: None,
3376 })
3377 })
3378 }
3379
3380 /// Returns a tuple of the range and character kind of the word
3381 /// surrounding the given position.
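    ///
    /// A brief sketch (marked `ignore`; the `CharKind::Word` variant named here is an
    /// assumption about this crate's `CharKind` enum):
    ///
    /// ```ignore
    /// // With buffer text "hello world" and an offset inside "world",
    /// // the returned range covers the whole word.
    /// let (range, kind) = snapshot.surrounding_word(8, false);
    /// assert_eq!(range, 6..11);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```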
3382 pub fn surrounding_word<T: ToOffset>(
3383 &self,
3384 start: T,
3385 for_completion: bool,
3386 ) -> (Range<usize>, Option<CharKind>) {
3387 let mut start = start.to_offset(self);
3388 let mut end = start;
3389 let mut next_chars = self.chars_at(start).take(128).peekable();
3390 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3391
3392 let classifier = self
3393 .char_classifier_at(start)
3394 .for_completion(for_completion);
3395 let word_kind = cmp::max(
3396 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3397 next_chars.peek().copied().map(|c| classifier.kind(c)),
3398 );
3399
3400 for ch in prev_chars {
3401 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3402 start -= ch.len_utf8();
3403 } else {
3404 break;
3405 }
3406 }
3407
3408 for ch in next_chars {
3409 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3410 end += ch.len_utf8();
3411 } else {
3412 break;
3413 }
3414 }
3415
3416 (start..end, word_kind)
3417 }
3418
3419 /// Returns the closest syntax node enclosing the given range.
3420 pub fn syntax_ancestor<'a, T: ToOffset>(
3421 &'a self,
3422 range: Range<T>,
3423 ) -> Option<tree_sitter::Node<'a>> {
3424 let range = range.start.to_offset(self)..range.end.to_offset(self);
3425 let mut result: Option<tree_sitter::Node<'a>> = None;
3426 'outer: for layer in self
3427 .syntax
3428 .layers_for_range(range.clone(), &self.text, true)
3429 {
3430 let mut cursor = layer.node().walk();
3431
3432 // Descend to the first leaf that touches the start of the range.
3433 //
3434 // If the range is non-empty and the current node ends exactly at the start,
3435 // move to the next sibling to find a node that extends beyond the start.
3436 //
3437 // If the range is empty and the current node starts after the range position,
3438 // move to the previous sibling to find the node that contains the position.
3439 while cursor.goto_first_child_for_byte(range.start).is_some() {
3440 if !range.is_empty() && cursor.node().end_byte() == range.start {
3441 cursor.goto_next_sibling();
3442 }
3443 if range.is_empty() && cursor.node().start_byte() > range.start {
3444 cursor.goto_previous_sibling();
3445 }
3446 }
3447
3448 // Ascend to the smallest ancestor that strictly contains the range.
3449 loop {
3450 let node_range = cursor.node().byte_range();
3451 if node_range.start <= range.start
3452 && node_range.end >= range.end
3453 && node_range.len() > range.len()
3454 {
3455 break;
3456 }
3457 if !cursor.goto_parent() {
3458 continue 'outer;
3459 }
3460 }
3461
3462 let left_node = cursor.node();
3463 let mut layer_result = left_node;
3464
3465 // For an empty range, try to find another node immediately to the right of the range.
3466 if left_node.end_byte() == range.start {
3467 let mut right_node = None;
3468 while !cursor.goto_next_sibling() {
3469 if !cursor.goto_parent() {
3470 break;
3471 }
3472 }
3473
3474 while cursor.node().start_byte() == range.start {
3475 right_node = Some(cursor.node());
3476 if !cursor.goto_first_child() {
3477 break;
3478 }
3479 }
3480
3481 // If there is a candidate node on both sides of the (empty) range, then
3482 // decide between the two by favoring a named node over an anonymous token.
3483 // If both nodes are the same in that regard, favor the right one.
3484 if let Some(right_node) = right_node {
3485 if right_node.is_named() || !left_node.is_named() {
3486 layer_result = right_node;
3487 }
3488 }
3489 }
3490
3491 if let Some(previous_result) = &result {
3492 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3493 continue;
3494 }
3495 }
3496 result = Some(layer_result);
3497 }
3498
3499 result
3500 }
3501
3502 /// Returns the root syntax node within the given row
3503 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3504 let start_offset = position.to_offset(self);
3505
3506 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3507
3508 let layer = self
3509 .syntax
3510 .layers_for_range(start_offset..start_offset, &self.text, true)
3511 .next()?;
3512
3513 let mut cursor = layer.node().walk();
3514
3515 // Descend to the first leaf that touches the start of the range.
3516 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3517 if cursor.node().end_byte() == start_offset {
3518 cursor.goto_next_sibling();
3519 }
3520 }
3521
3522 // Ascend to the root node within the same row.
3523 while cursor.goto_parent() {
3524 if cursor.node().start_position().row != row {
3525 break;
3526 }
3527 }
3528
        Some(cursor.node())
3530 }
3531
3532 /// Returns the outline for the buffer.
3533 ///
3534 /// This method allows passing an optional [`SyntaxTheme`] to
3535 /// syntax-highlight the returned symbols.
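    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` whose language has an outline query:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```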
3536 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3537 self.outline_items_containing(0..self.len(), true, theme)
3538 .map(Outline::new)
3539 }
3540
3541 /// Returns all the symbols that contain the given position.
3542 ///
3543 /// This method allows passing an optional [`SyntaxTheme`] to
3544 /// syntax-highlight the returned symbols.
3545 pub fn symbols_containing<T: ToOffset>(
3546 &self,
3547 position: T,
3548 theme: Option<&SyntaxTheme>,
3549 ) -> Option<Vec<OutlineItem<Anchor>>> {
3550 let position = position.to_offset(self);
3551 let mut items = self.outline_items_containing(
3552 position.saturating_sub(1)..self.len().min(position + 1),
3553 false,
3554 theme,
3555 )?;
3556 let mut prev_depth = None;
3557 items.retain(|item| {
3558 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3559 prev_depth = Some(item.depth);
3560 result
3561 });
3562 Some(items)
3563 }
3564
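    /// Returns the point range of the first outline item that intersects the
    /// given range, if any.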
3565 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3566 let range = range.to_offset(self);
3567 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3568 grammar.outline_config.as_ref().map(|c| &c.query)
3569 });
3570 let configs = matches
3571 .grammars()
3572 .iter()
3573 .map(|g| g.outline_config.as_ref().unwrap())
3574 .collect::<Vec<_>>();
3575
3576 while let Some(mat) = matches.peek() {
3577 let config = &configs[mat.grammar_index];
3578 let containing_item_node = maybe!({
3579 let item_node = mat.captures.iter().find_map(|cap| {
3580 if cap.index == config.item_capture_ix {
3581 Some(cap.node)
3582 } else {
3583 None
3584 }
3585 })?;
3586
3587 let item_byte_range = item_node.byte_range();
3588 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3589 None
3590 } else {
3591 Some(item_node)
3592 }
3593 });
3594
3595 if let Some(item_node) = containing_item_node {
3596 return Some(
3597 Point::from_ts_point(item_node.start_position())
3598 ..Point::from_ts_point(item_node.end_position()),
3599 );
3600 }
3601
3602 matches.advance();
3603 }
3604 None
3605 }
3606
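    /// Returns the outline items intersecting the given range, with depths
    /// assigned based on containment and ranges converted to anchors.
    ///
    /// When `include_extra_context` is `false`, captures marked as extra
    /// context in the outline query are omitted from each item's text.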
3607 pub fn outline_items_containing<T: ToOffset>(
3608 &self,
3609 range: Range<T>,
3610 include_extra_context: bool,
3611 theme: Option<&SyntaxTheme>,
3612 ) -> Option<Vec<OutlineItem<Anchor>>> {
3613 let range = range.to_offset(self);
3614 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3615 grammar.outline_config.as_ref().map(|c| &c.query)
3616 });
3617 let configs = matches
3618 .grammars()
3619 .iter()
3620 .map(|g| g.outline_config.as_ref().unwrap())
3621 .collect::<Vec<_>>();
3622
3623 let mut items = Vec::new();
3624 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3625 while let Some(mat) = matches.peek() {
3626 let config = &configs[mat.grammar_index];
3627 if let Some(item) =
3628 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3629 {
3630 items.push(item);
3631 } else if let Some(capture) = mat
3632 .captures
3633 .iter()
3634 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3635 {
3636 let capture_range = capture.node.start_position()..capture.node.end_position();
3637 let mut capture_row_range =
3638 capture_range.start.row as u32..capture_range.end.row as u32;
3639 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3640 {
3641 capture_row_range.end -= 1;
3642 }
3643 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3644 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3645 last_row_range.end = capture_row_range.end;
3646 } else {
3647 annotation_row_ranges.push(capture_row_range);
3648 }
3649 } else {
3650 annotation_row_ranges.push(capture_row_range);
3651 }
3652 }
3653 matches.advance();
3654 }
3655
3656 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3657
3658 // Assign depths based on containment relationships and convert to anchors.
3659 let mut item_ends_stack = Vec::<Point>::new();
3660 let mut anchor_items = Vec::new();
3661 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3662 for item in items {
3663 while let Some(last_end) = item_ends_stack.last().copied() {
3664 if last_end < item.range.end {
3665 item_ends_stack.pop();
3666 } else {
3667 break;
3668 }
3669 }
3670
3671 let mut annotation_row_range = None;
3672 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3673 let row_preceding_item = item.range.start.row.saturating_sub(1);
3674 if next_annotation_row_range.end < row_preceding_item {
3675 annotation_row_ranges.next();
3676 } else {
3677 if next_annotation_row_range.end == row_preceding_item {
3678 annotation_row_range = Some(next_annotation_row_range.clone());
3679 annotation_row_ranges.next();
3680 }
3681 break;
3682 }
3683 }
3684
3685 anchor_items.push(OutlineItem {
3686 depth: item_ends_stack.len(),
3687 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3688 text: item.text,
3689 highlight_ranges: item.highlight_ranges,
3690 name_ranges: item.name_ranges,
3691 body_range: item.body_range.map(|body_range| {
3692 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3693 }),
3694 annotation_range: annotation_row_range.map(|annotation_range| {
3695 self.anchor_after(Point::new(annotation_range.start, 0))
3696 ..self.anchor_before(Point::new(
3697 annotation_range.end,
3698 self.line_len(annotation_range.end),
3699 ))
3700 }),
3701 });
3702 item_ends_stack.push(item.range.end);
3703 }
3704
3705 Some(anchor_items)
3706 }
3707
3708 fn next_outline_item(
3709 &self,
3710 config: &OutlineConfig,
3711 mat: &SyntaxMapMatch,
3712 range: &Range<usize>,
3713 include_extra_context: bool,
3714 theme: Option<&SyntaxTheme>,
3715 ) -> Option<OutlineItem<Point>> {
3716 let item_node = mat.captures.iter().find_map(|cap| {
3717 if cap.index == config.item_capture_ix {
3718 Some(cap.node)
3719 } else {
3720 None
3721 }
3722 })?;
3723
3724 let item_byte_range = item_node.byte_range();
3725 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3726 return None;
3727 }
3728 let item_point_range = Point::from_ts_point(item_node.start_position())
3729 ..Point::from_ts_point(item_node.end_position());
3730
3731 let mut open_point = None;
3732 let mut close_point = None;
3733 let mut buffer_ranges = Vec::new();
3734 for capture in mat.captures {
3735 let node_is_name;
3736 if capture.index == config.name_capture_ix {
3737 node_is_name = true;
3738 } else if Some(capture.index) == config.context_capture_ix
3739 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3740 {
3741 node_is_name = false;
3742 } else {
3743 if Some(capture.index) == config.open_capture_ix {
3744 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3745 } else if Some(capture.index) == config.close_capture_ix {
3746 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3747 }
3748
3749 continue;
3750 }
3751
3752 let mut range = capture.node.start_byte()..capture.node.end_byte();
3753 let start = capture.node.start_position();
3754 if capture.node.end_position().row > start.row {
3755 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3756 }
3757
3758 if !range.is_empty() {
3759 buffer_ranges.push((range, node_is_name));
3760 }
3761 }
3762 if buffer_ranges.is_empty() {
3763 return None;
3764 }
3765 let mut text = String::new();
3766 let mut highlight_ranges = Vec::new();
3767 let mut name_ranges = Vec::new();
3768 let mut chunks = self.chunks(
3769 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3770 true,
3771 );
3772 let mut last_buffer_range_end = 0;
3773
3774 for (buffer_range, is_name) in buffer_ranges {
3775 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3776 if space_added {
3777 text.push(' ');
3778 }
3779 let before_append_len = text.len();
3780 let mut offset = buffer_range.start;
3781 chunks.seek(buffer_range.clone());
3782 for mut chunk in chunks.by_ref() {
3783 if chunk.text.len() > buffer_range.end - offset {
3784 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3785 offset = buffer_range.end;
3786 } else {
3787 offset += chunk.text.len();
3788 }
3789 let style = chunk
3790 .syntax_highlight_id
3791 .zip(theme)
3792 .and_then(|(highlight, theme)| highlight.style(theme));
3793 if let Some(style) = style {
3794 let start = text.len();
3795 let end = start + chunk.text.len();
3796 highlight_ranges.push((start..end, style));
3797 }
3798 text.push_str(chunk.text);
3799 if offset >= buffer_range.end {
3800 break;
3801 }
3802 }
3803 if is_name {
3804 let after_append_len = text.len();
3805 let start = if space_added && !name_ranges.is_empty() {
3806 before_append_len - 1
3807 } else {
3808 before_append_len
3809 };
3810 name_ranges.push(start..after_append_len);
3811 }
3812 last_buffer_range_end = buffer_range.end;
3813 }
3814
3815 Some(OutlineItem {
3816 depth: 0, // We'll calculate the depth later
3817 range: item_point_range,
3818 text,
3819 highlight_ranges,
3820 name_ranges,
3821 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3822 annotation_range: None,
3823 })
3824 }
3825
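    /// Returns the ranges of function bodies intersecting the given range,
    /// suitable for folding.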
3826 pub fn function_body_fold_ranges<T: ToOffset>(
3827 &self,
3828 within: Range<T>,
3829 ) -> impl Iterator<Item = Range<usize>> + '_ {
3830 self.text_object_ranges(within, TreeSitterOptions::default())
3831 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3832 }
3833
3834 /// For each grammar in the language, runs the provided
3835 /// [`tree_sitter::Query`] against the given range.
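    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Run each grammar's outline query over the whole buffer.
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here...
    ///     matches.advance();
    /// }
    /// ```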
3836 pub fn matches(
3837 &self,
3838 range: Range<usize>,
3839 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3840 ) -> SyntaxMapMatches<'_> {
3841 self.syntax.matches(range, self, query)
3842 }
3843
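    /// Returns all bracket pair ranges that overlap the given range, including
    /// pairs whose patterns are marked as newline-only.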
3844 pub fn all_bracket_ranges(
3845 &self,
3846 range: Range<usize>,
3847 ) -> impl Iterator<Item = BracketMatch> + '_ {
3848 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3849 grammar.brackets_config.as_ref().map(|c| &c.query)
3850 });
3851 let configs = matches
3852 .grammars()
3853 .iter()
3854 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3855 .collect::<Vec<_>>();
3856
3857 iter::from_fn(move || {
3858 while let Some(mat) = matches.peek() {
3859 let mut open = None;
3860 let mut close = None;
3861 let config = &configs[mat.grammar_index];
3862 let pattern = &config.patterns[mat.pattern_index];
3863 for capture in mat.captures {
3864 if capture.index == config.open_capture_ix {
3865 open = Some(capture.node.byte_range());
3866 } else if capture.index == config.close_capture_ix {
3867 close = Some(capture.node.byte_range());
3868 }
3869 }
3870
3871 matches.advance();
3872
3873 let Some((open_range, close_range)) = open.zip(close) else {
3874 continue;
3875 };
3876
3877 let bracket_range = open_range.start..=close_range.end;
3878 if !bracket_range.overlaps(&range) {
3879 continue;
3880 }
3881
3882 return Some(BracketMatch {
3883 open_range,
3884 close_range,
3885 newline_only: pattern.newline_only,
3886 });
3887 }
3888 None
3889 })
3890 }
3891
3892 /// Returns bracket range pairs overlapping or adjacent to `range`
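    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` whose language has a brackets query and
    /// `selection_range` is a `Range<usize>` within it:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(selection_range.clone()) {
    ///     println!("open: {:?}, close: {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```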
3893 pub fn bracket_ranges<T: ToOffset>(
3894 &self,
3895 range: Range<T>,
3896 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side so that bracket
        // pairs adjacent to the range are matched as well.
        let range = range.start.to_offset(self).saturating_sub(1)
            ..self.len().min(range.end.to_offset(self) + 1);
3900 self.all_bracket_ranges(range)
3901 .filter(|pair| !pair.newline_only)
3902 }
3903
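    /// Returns the ranges and [`DebuggerTextObject`]s captured by each
    /// grammar's debug-variables query within (or immediately adjacent to)
    /// the given range.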
3904 pub fn debug_variables_query<T: ToOffset>(
3905 &self,
3906 range: Range<T>,
3907 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3908 let range = range.start.to_offset(self).saturating_sub(1)
3909 ..self.len().min(range.end.to_offset(self) + 1);
3910
3911 let mut matches = self.syntax.matches_with_options(
3912 range.clone(),
3913 &self.text,
3914 TreeSitterOptions::default(),
3915 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3916 );
3917
3918 let configs = matches
3919 .grammars()
3920 .iter()
3921 .map(|grammar| grammar.debug_variables_config.as_ref())
3922 .collect::<Vec<_>>();
3923
3924 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3925
3926 iter::from_fn(move || {
3927 loop {
3928 while let Some(capture) = captures.pop() {
3929 if capture.0.overlaps(&range) {
3930 return Some(capture);
3931 }
3932 }
3933
3934 let mat = matches.peek()?;
3935
3936 let Some(config) = configs[mat.grammar_index].as_ref() else {
3937 matches.advance();
3938 continue;
3939 };
3940
3941 for capture in mat.captures {
3942 let Some(ix) = config
3943 .objects_by_capture_ix
3944 .binary_search_by_key(&capture.index, |e| e.0)
3945 .ok()
3946 else {
3947 continue;
3948 };
3949 let text_object = config.objects_by_capture_ix[ix].1;
3950 let byte_range = capture.node.byte_range();
3951
3952 let mut found = false;
3953 for (range, existing) in captures.iter_mut() {
3954 if existing == &text_object {
3955 range.start = range.start.min(byte_range.start);
3956 range.end = range.end.max(byte_range.end);
3957 found = true;
3958 break;
3959 }
3960 }
3961
3962 if !found {
3963 captures.push((byte_range, text_object));
3964 }
3965 }
3966
3967 matches.advance();
3968 }
3969 })
3970 }
3971
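    /// Returns the ranges and [`TextObject`]s captured by each grammar's
    /// text-objects query within (or immediately adjacent to) the given range.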
3972 pub fn text_object_ranges<T: ToOffset>(
3973 &self,
3974 range: Range<T>,
3975 options: TreeSitterOptions,
3976 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3977 let range = range.start.to_offset(self).saturating_sub(1)
3978 ..self.len().min(range.end.to_offset(self) + 1);
3979
3980 let mut matches =
3981 self.syntax
3982 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3983 grammar.text_object_config.as_ref().map(|c| &c.query)
3984 });
3985
3986 let configs = matches
3987 .grammars()
3988 .iter()
3989 .map(|grammar| grammar.text_object_config.as_ref())
3990 .collect::<Vec<_>>();
3991
3992 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3993
3994 iter::from_fn(move || {
3995 loop {
3996 while let Some(capture) = captures.pop() {
3997 if capture.0.overlaps(&range) {
3998 return Some(capture);
3999 }
4000 }
4001
4002 let mat = matches.peek()?;
4003
4004 let Some(config) = configs[mat.grammar_index].as_ref() else {
4005 matches.advance();
4006 continue;
4007 };
4008
4009 for capture in mat.captures {
4010 let Some(ix) = config
4011 .text_objects_by_capture_ix
4012 .binary_search_by_key(&capture.index, |e| e.0)
4013 .ok()
4014 else {
4015 continue;
4016 };
4017 let text_object = config.text_objects_by_capture_ix[ix].1;
4018 let byte_range = capture.node.byte_range();
4019
4020 let mut found = false;
4021 for (range, existing) in captures.iter_mut() {
4022 if existing == &text_object {
4023 range.start = range.start.min(byte_range.start);
4024 range.end = range.end.max(byte_range.end);
4025 found = true;
4026 break;
4027 }
4028 }
4029
4030 if !found {
4031 captures.push((byte_range, text_object));
4032 }
4033 }
4034
4035 matches.advance();
4036 }
4037 })
4038 }
4039
4040 /// Returns enclosing bracket ranges containing the given range
4041 pub fn enclosing_bracket_ranges<T: ToOffset>(
4042 &self,
4043 range: Range<T>,
4044 ) -> impl Iterator<Item = BracketMatch> + '_ {
4045 let range = range.start.to_offset(self)..range.end.to_offset(self);
4046
4047 self.bracket_ranges(range.clone()).filter(move |pair| {
4048 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4049 })
4050 }
4051
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs
    /// are considered.
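    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` whose language has a brackets query and
    /// `cursor_offset` is an offset within it:
    ///
    /// ```ignore
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(cursor_offset..cursor_offset, None)
    /// {
    ///     println!("innermost pair: {:?} .. {:?}", open, close);
    /// }
    /// ```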
4055 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4056 &self,
4057 range: Range<T>,
4058 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4059 ) -> Option<(Range<usize>, Range<usize>)> {
4060 let range = range.start.to_offset(self)..range.end.to_offset(self);
4061
4062 // Get the ranges of the innermost pair of brackets.
4063 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4064
4065 for pair in self.enclosing_bracket_ranges(range.clone()) {
4066 if let Some(range_filter) = range_filter {
4067 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
4068 continue;
4069 }
4070 }
4071
4072 let len = pair.close_range.end - pair.open_range.start;
4073
4074 if let Some((existing_open, existing_close)) = &result {
4075 let existing_len = existing_close.end - existing_open.start;
4076 if len > existing_len {
4077 continue;
4078 }
4079 }
4080
4081 result = Some((pair.open_range, pair.close_range));
4082 }
4083
4084 result
4085 }
4086
4087 /// Returns anchor ranges for any matches of the redaction query.
4088 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4089 /// will be run on the relevant section of the buffer.
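    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` whose language has a redactions query:
    ///
    /// ```ignore
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```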
4090 pub fn redacted_ranges<T: ToOffset>(
4091 &self,
4092 range: Range<T>,
4093 ) -> impl Iterator<Item = Range<usize>> + '_ {
4094 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4095 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4096 grammar
4097 .redactions_config
4098 .as_ref()
4099 .map(|config| &config.query)
4100 });
4101
4102 let configs = syntax_matches
4103 .grammars()
4104 .iter()
4105 .map(|grammar| grammar.redactions_config.as_ref())
4106 .collect::<Vec<_>>();
4107
4108 iter::from_fn(move || {
4109 let redacted_range = syntax_matches
4110 .peek()
4111 .and_then(|mat| {
4112 configs[mat.grammar_index].and_then(|config| {
4113 mat.captures
4114 .iter()
4115 .find(|capture| capture.index == config.redaction_capture_ix)
4116 })
4117 })
4118 .map(|mat| mat.node.byte_range());
4119 syntax_matches.advance();
4120 redacted_range
4121 })
4122 }
4123
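    /// Returns the ranges and languages of injected content (such as code
    /// embedded in another language) that intersect the given range.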
4124 pub fn injections_intersecting_range<T: ToOffset>(
4125 &self,
4126 range: Range<T>,
4127 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4128 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4129
4130 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4131 grammar
4132 .injection_config
4133 .as_ref()
4134 .map(|config| &config.query)
4135 });
4136
4137 let configs = syntax_matches
4138 .grammars()
4139 .iter()
4140 .map(|grammar| grammar.injection_config.as_ref())
4141 .collect::<Vec<_>>();
4142
4143 iter::from_fn(move || {
4144 let ranges = syntax_matches.peek().and_then(|mat| {
4145 let config = &configs[mat.grammar_index]?;
4146 let content_capture_range = mat.captures.iter().find_map(|capture| {
4147 if capture.index == config.content_capture_ix {
4148 Some(capture.node.byte_range())
4149 } else {
4150 None
4151 }
4152 })?;
4153 let language = self.language_at(content_capture_range.start)?;
4154 Some((content_capture_range, language))
4155 });
4156 syntax_matches.advance();
4157 ranges
4158 })
4159 }
4160
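    /// Returns the runnables captured by each grammar's runnables query within
    /// the given range, along with their run ranges and extra captures.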
4161 pub fn runnable_ranges(
4162 &self,
4163 offset_range: Range<usize>,
4164 ) -> impl Iterator<Item = RunnableRange> + '_ {
4165 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4166 grammar.runnable_config.as_ref().map(|config| &config.query)
4167 });
4168
4169 let test_configs = syntax_matches
4170 .grammars()
4171 .iter()
4172 .map(|grammar| grammar.runnable_config.as_ref())
4173 .collect::<Vec<_>>();
4174
4175 iter::from_fn(move || {
4176 loop {
4177 let mat = syntax_matches.peek()?;
4178
4179 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4180 let mut run_range = None;
4181 let full_range = mat.captures.iter().fold(
4182 Range {
4183 start: usize::MAX,
4184 end: 0,
4185 },
4186 |mut acc, next| {
4187 let byte_range = next.node.byte_range();
4188 if acc.start > byte_range.start {
4189 acc.start = byte_range.start;
4190 }
4191 if acc.end < byte_range.end {
4192 acc.end = byte_range.end;
4193 }
4194 acc
4195 },
4196 );
4197 if full_range.start > full_range.end {
4198 // We did not find a full spanning range of this match.
4199 return None;
4200 }
4201 let extra_captures: SmallVec<[_; 1]> =
4202 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4203 test_configs
4204 .extra_captures
4205 .get(capture.index as usize)
4206 .cloned()
4207 .and_then(|tag_name| match tag_name {
4208 RunnableCapture::Named(name) => {
4209 Some((capture.node.byte_range(), name))
4210 }
4211 RunnableCapture::Run => {
4212 let _ = run_range.insert(capture.node.byte_range());
4213 None
4214 }
4215 })
4216 }));
4217 let run_range = run_range?;
4218 let tags = test_configs
4219 .query
4220 .property_settings(mat.pattern_index)
4221 .iter()
4222 .filter_map(|property| {
4223 if *property.key == *"tag" {
4224 property
4225 .value
4226 .as_ref()
4227 .map(|value| RunnableTag(value.to_string().into()))
4228 } else {
4229 None
4230 }
4231 })
4232 .collect();
4233 let extra_captures = extra_captures
4234 .into_iter()
4235 .map(|(range, name)| {
4236 (
4237 name.to_string(),
4238 self.text_for_range(range.clone()).collect::<String>(),
4239 )
4240 })
4241 .collect();
4242 // All tags should have the same range.
4243 Some(RunnableRange {
4244 run_range,
4245 full_range,
4246 runnable: Runnable {
4247 tags,
4248 language: mat.language,
4249 buffer: self.remote_id(),
4250 },
4251 extra_captures,
4252 buffer_id: self.remote_id(),
4253 })
4254 });
4255
4256 syntax_matches.advance();
4257 if test_range.is_some() {
                    // It's fine to end the iterator when `.peek()?` returns `None`. But if a match
                    // simply did not contain a run marker, we don't want to return `None` from this
                    // iterator; instead, loop around and try the next match.
4260 return test_range;
4261 }
4262 }
4263 })
4264 }
4265
    /// Returns selections for remote peers intersecting the given range,
    /// optionally including this replica's own selections.
4267 #[allow(clippy::type_complexity)]
4268 pub fn selections_in_range(
4269 &self,
4270 range: Range<Anchor>,
4271 include_local: bool,
4272 ) -> impl Iterator<
4273 Item = (
4274 ReplicaId,
4275 bool,
4276 CursorShape,
4277 impl Iterator<Item = &Selection<Anchor>> + '_,
4278 ),
4279 > + '_ {
4280 self.remote_selections
4281 .iter()
4282 .filter(move |(replica_id, set)| {
4283 (include_local || **replica_id != self.text.replica_id())
4284 && !set.selections.is_empty()
4285 })
4286 .map(move |(replica_id, set)| {
4287 let start_ix = match set.selections.binary_search_by(|probe| {
4288 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4289 }) {
4290 Ok(ix) | Err(ix) => ix,
4291 };
4292 let end_ix = match set.selections.binary_search_by(|probe| {
4293 probe.start.cmp(&range.end, self).then(Ordering::Less)
4294 }) {
4295 Ok(ix) | Err(ix) => ix,
4296 };
4297
4298 (
4299 *replica_id,
4300 set.line_mode,
4301 set.cursor_shape,
4302 set.selections[start_ix..end_ix].iter(),
4303 )
4304 })
4305 }
4306
    /// Returns whether the buffer contains any diagnostics.
4308 pub fn has_diagnostics(&self) -> bool {
4309 !self.diagnostics.is_empty()
4310 }
4311
4312 /// Returns all the diagnostics intersecting the given range.
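    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot` with diagnostics:
    ///
    /// ```ignore
    /// // Collect all diagnostics in the buffer, with ranges resolved to `Point`s.
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```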
4313 pub fn diagnostics_in_range<'a, T, O>(
4314 &'a self,
4315 search_range: Range<T>,
4316 reversed: bool,
4317 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4318 where
4319 T: 'a + Clone + ToOffset,
4320 O: 'a + FromAnchor,
4321 {
4322 let mut iterators: Vec<_> = self
4323 .diagnostics
4324 .iter()
4325 .map(|(_, collection)| {
4326 collection
4327 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4328 .peekable()
4329 })
4330 .collect();
4331
4332 std::iter::from_fn(move || {
4333 let (next_ix, _) = iterators
4334 .iter_mut()
4335 .enumerate()
4336 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4337 .min_by(|(_, a), (_, b)| {
4338 let cmp = a
4339 .range
4340 .start
4341 .cmp(&b.range.start, self)
4342 // when range is equal, sort by diagnostic severity
4343 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4344 // and stabilize order with group_id
4345 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4346 if reversed { cmp.reverse() } else { cmp }
4347 })?;
4348 iterators[next_ix]
4349 .next()
4350 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4351 diagnostic,
4352 range: FromAnchor::from_anchor(&range.start, self)
4353 ..FromAnchor::from_anchor(&range.end, self),
4354 })
4355 })
4356 }
4357
4358 /// Returns all the diagnostic groups associated with the given
4359 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4361 pub fn diagnostic_groups(
4362 &self,
4363 language_server_id: Option<LanguageServerId>,
4364 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4365 let mut groups = Vec::new();
4366
4367 if let Some(language_server_id) = language_server_id {
4368 if let Ok(ix) = self
4369 .diagnostics
4370 .binary_search_by_key(&language_server_id, |e| e.0)
4371 {
4372 self.diagnostics[ix]
4373 .1
4374 .groups(language_server_id, &mut groups, self);
4375 }
4376 } else {
4377 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4378 diagnostics.groups(*language_server_id, &mut groups, self);
4379 }
4380 }
4381
4382 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4383 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4384 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4385 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4386 });
4387
4388 groups
4389 }
4390
4391 /// Returns an iterator over the diagnostics for the given group.
4392 pub fn diagnostic_group<O>(
4393 &self,
4394 group_id: usize,
4395 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4396 where
4397 O: FromAnchor + 'static,
4398 {
4399 self.diagnostics
4400 .iter()
4401 .flat_map(move |(_, set)| set.group(group_id, self))
4402 }
4403
4404 /// An integer version number that accounts for all updates besides
4405 /// the buffer's text itself (which is versioned via a version vector).
4406 pub fn non_text_state_update_count(&self) -> usize {
4407 self.non_text_state_update_count
4408 }
4409
4410 /// An integer version that changes when the buffer's syntax changes.
4411 pub fn syntax_update_count(&self) -> usize {
4412 self.syntax.update_count()
4413 }
4414
    /// Returns a snapshot of the underlying file.
4416 pub fn file(&self) -> Option<&Arc<dyn File>> {
4417 self.file.as_ref()
4418 }
4419
4420 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4421 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4422 if let Some(file) = self.file() {
4423 if file.path().file_name().is_none() || include_root {
4424 Some(file.full_path(cx))
4425 } else {
4426 Some(file.path().to_path_buf())
4427 }
4428 } else {
4429 None
4430 }
4431 }
4432
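    /// Returns the distinct words within the given range, keyed by their text,
    /// optionally filtered by a fuzzy query and by whether they start with a digit.
    ///
    /// A usage sketch (not compiled as a doctest), assuming `snapshot` is a
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```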
4433 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4434 let query_str = query.fuzzy_contents;
4435 if query_str.map_or(false, |query| query.is_empty()) {
4436 return BTreeMap::default();
4437 }
4438
4439 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4440 language,
4441 override_id: None,
4442 }));
4443
4444 let mut query_ix = 0;
4445 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4446 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4447
4448 let mut words = BTreeMap::default();
4449 let mut current_word_start_ix = None;
4450 let mut chunk_ix = query.range.start;
4451 for chunk in self.chunks(query.range, false) {
4452 for (i, c) in chunk.text.char_indices() {
4453 let ix = chunk_ix + i;
4454 if classifier.is_word(c) {
4455 if current_word_start_ix.is_none() {
4456 current_word_start_ix = Some(ix);
4457 }
4458
4459 if let Some(query_chars) = &query_chars {
4460 if query_ix < query_len {
4461 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4462 query_ix += 1;
4463 }
4464 }
4465 }
4466 continue;
4467 } else if let Some(word_start) = current_word_start_ix.take() {
4468 if query_ix == query_len {
4469 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4470 let mut word_text = self.text_for_range(word_start..ix).peekable();
4471 let first_char = word_text
4472 .peek()
4473 .and_then(|first_chunk| first_chunk.chars().next());
                        // When `skip_digits` is set, skip words that start with a digit,
                        // as a heuristic to reduce useless completions.
4475 if !query.skip_digits
4476 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4477 {
4478 words.insert(word_text.collect(), word_range);
4479 }
4480 }
4481 }
4482 query_ix = 0;
4483 }
4484 chunk_ix += chunk.text.len();
4485 }
4486
4487 words
4488 }
4489}
4490
4491pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of the fuzzy string,
    /// in order (matched case-insensitively).
4493 pub fuzzy_contents: Option<&'a str>,
4494 /// Skips words that start with a digit.
4495 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4497 pub range: Range<usize>,
4498}
4499
4500fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4501 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4502}
4503
4504fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4505 let mut result = IndentSize::spaces(0);
4506 for c in text {
4507 let kind = match c {
4508 ' ' => IndentKind::Space,
4509 '\t' => IndentKind::Tab,
4510 _ => break,
4511 };
4512 if result.len == 0 {
4513 result.kind = kind;
4514 }
4515 result.len += 1;
4516 }
4517 result
4518}
4519
4520impl Clone for BufferSnapshot {
4521 fn clone(&self) -> Self {
4522 Self {
4523 text: self.text.clone(),
4524 syntax: self.syntax.clone(),
4525 file: self.file.clone(),
4526 remote_selections: self.remote_selections.clone(),
4527 diagnostics: self.diagnostics.clone(),
4528 language: self.language.clone(),
4529 non_text_state_update_count: self.non_text_state_update_count,
4530 }
4531 }
4532}
4533
4534impl Deref for BufferSnapshot {
4535 type Target = text::BufferSnapshot;
4536
4537 fn deref(&self) -> &Self::Target {
4538 &self.text
4539 }
4540}
4541
4542unsafe impl Send for BufferChunks<'_> {}
4543
4544impl<'a> BufferChunks<'a> {
4545 pub(crate) fn new(
4546 text: &'a Rope,
4547 range: Range<usize>,
4548 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4549 diagnostics: bool,
4550 buffer_snapshot: Option<&'a BufferSnapshot>,
4551 ) -> Self {
4552 let mut highlights = None;
4553 if let Some((captures, highlight_maps)) = syntax {
4554 highlights = Some(BufferChunkHighlights {
4555 captures,
4556 next_capture: None,
4557 stack: Default::default(),
4558 highlight_maps,
4559 })
4560 }
4561
4562 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4563 let chunks = text.chunks_in_range(range.clone());
4564
4565 let mut this = BufferChunks {
4566 range,
4567 buffer_snapshot,
4568 chunks,
4569 diagnostic_endpoints,
4570 error_depth: 0,
4571 warning_depth: 0,
4572 information_depth: 0,
4573 hint_depth: 0,
4574 unnecessary_depth: 0,
4575 underline: true,
4576 highlights,
4577 };
4578 this.initialize_diagnostic_endpoints();
4579 this
4580 }
4581
    /// Seeks to the given byte range in the buffer.
4583 pub fn seek(&mut self, range: Range<usize>) {
4584 let old_range = std::mem::replace(&mut self.range, range.clone());
4585 self.chunks.set_range(self.range.clone());
4586 if let Some(highlights) = self.highlights.as_mut() {
4587 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4588 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4589 highlights
4590 .stack
4591 .retain(|(end_offset, _)| *end_offset > range.start);
4592 if let Some(capture) = &highlights.next_capture {
4593 if range.start >= capture.node.start_byte() {
4594 let next_capture_end = capture.node.end_byte();
4595 if range.start < next_capture_end {
4596 highlights.stack.push((
4597 next_capture_end,
4598 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4599 ));
4600 }
4601 highlights.next_capture.take();
4602 }
4603 }
4604 } else if let Some(snapshot) = self.buffer_snapshot {
4605 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4606 *highlights = BufferChunkHighlights {
4607 captures,
4608 next_capture: None,
4609 stack: Default::default(),
4610 highlight_maps,
4611 };
4612 } else {
4613 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4614 // Seeking such BufferChunks is not supported.
4615 debug_assert!(
4616 false,
4617 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4618 );
4619 }
4620
4621 highlights.captures.set_byte_range(self.range.clone());
4622 self.initialize_diagnostic_endpoints();
4623 }
4624 }
4625
4626 fn initialize_diagnostic_endpoints(&mut self) {
4627 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4628 if let Some(buffer) = self.buffer_snapshot {
4629 let mut diagnostic_endpoints = Vec::new();
4630 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4631 diagnostic_endpoints.push(DiagnosticEndpoint {
4632 offset: entry.range.start,
4633 is_start: true,
4634 severity: entry.diagnostic.severity,
4635 is_unnecessary: entry.diagnostic.is_unnecessary,
4636 underline: entry.diagnostic.underline,
4637 });
4638 diagnostic_endpoints.push(DiagnosticEndpoint {
4639 offset: entry.range.end,
4640 is_start: false,
4641 severity: entry.diagnostic.severity,
4642 is_unnecessary: entry.diagnostic.is_unnecessary,
4643 underline: entry.diagnostic.underline,
4644 });
4645 }
4646 diagnostic_endpoints
4647 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4648 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4649 self.hint_depth = 0;
4650 self.error_depth = 0;
4651 self.warning_depth = 0;
4652 self.information_depth = 0;
4653 }
4654 }
4655 }
4656
4657 /// The current byte offset in the buffer.
4658 pub fn offset(&self) -> usize {
4659 self.range.start
4660 }
4661
4662 pub fn range(&self) -> Range<usize> {
4663 self.range.clone()
4664 }
4665
4666 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4667 let depth = match endpoint.severity {
4668 DiagnosticSeverity::ERROR => &mut self.error_depth,
4669 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4670 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4671 DiagnosticSeverity::HINT => &mut self.hint_depth,
4672 _ => return,
4673 };
4674 if endpoint.is_start {
4675 *depth += 1;
4676 } else {
4677 *depth -= 1;
4678 }
4679
4680 if endpoint.is_unnecessary {
4681 if endpoint.is_start {
4682 self.unnecessary_depth += 1;
4683 } else {
4684 self.unnecessary_depth -= 1;
4685 }
4686 }
4687 }
4688
4689 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4690 if self.error_depth > 0 {
4691 Some(DiagnosticSeverity::ERROR)
4692 } else if self.warning_depth > 0 {
4693 Some(DiagnosticSeverity::WARNING)
4694 } else if self.information_depth > 0 {
4695 Some(DiagnosticSeverity::INFORMATION)
4696 } else if self.hint_depth > 0 {
4697 Some(DiagnosticSeverity::HINT)
4698 } else {
4699 None
4700 }
4701 }
4702
4703 fn current_code_is_unnecessary(&self) -> bool {
4704 self.unnecessary_depth > 0
4705 }
4706}
4707
4708impl<'a> Iterator for BufferChunks<'a> {
4709 type Item = Chunk<'a>;
4710
4711 fn next(&mut self) -> Option<Self::Item> {
4712 let mut next_capture_start = usize::MAX;
4713 let mut next_diagnostic_endpoint = usize::MAX;
4714
4715 if let Some(highlights) = self.highlights.as_mut() {
4716 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4717 if *parent_capture_end <= self.range.start {
4718 highlights.stack.pop();
4719 } else {
4720 break;
4721 }
4722 }
4723
4724 if highlights.next_capture.is_none() {
4725 highlights.next_capture = highlights.captures.next();
4726 }
4727
4728 while let Some(capture) = highlights.next_capture.as_ref() {
4729 if self.range.start < capture.node.start_byte() {
4730 next_capture_start = capture.node.start_byte();
4731 break;
4732 } else {
4733 let highlight_id =
4734 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4735 highlights
4736 .stack
4737 .push((capture.node.end_byte(), highlight_id));
4738 highlights.next_capture = highlights.captures.next();
4739 }
4740 }
4741 }
4742
4743 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4744 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4745 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4746 if endpoint.offset <= self.range.start {
4747 self.update_diagnostic_depths(endpoint);
4748 diagnostic_endpoints.next();
4749 self.underline = endpoint.underline;
4750 } else {
4751 next_diagnostic_endpoint = endpoint.offset;
4752 break;
4753 }
4754 }
4755 }
4756 self.diagnostic_endpoints = diagnostic_endpoints;
4757
4758 if let Some(chunk) = self.chunks.peek() {
4759 let chunk_start = self.range.start;
4760 let mut chunk_end = (self.chunks.offset() + chunk.len())
4761 .min(next_capture_start)
4762 .min(next_diagnostic_endpoint);
4763 let mut highlight_id = None;
4764 if let Some(highlights) = self.highlights.as_ref() {
4765 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4766 chunk_end = chunk_end.min(*parent_capture_end);
4767 highlight_id = Some(*parent_highlight_id);
4768 }
4769 }
4770
4771 let slice =
4772 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4773 self.range.start = chunk_end;
4774 if self.range.start == self.chunks.offset() + chunk.len() {
4775 self.chunks.next().unwrap();
4776 }
4777
4778 Some(Chunk {
4779 text: slice,
4780 syntax_highlight_id: highlight_id,
4781 underline: self.underline,
4782 diagnostic_severity: self.current_diagnostic_severity(),
4783 is_unnecessary: self.current_code_is_unnecessary(),
4784 ..Chunk::default()
4785 })
4786 } else {
4787 None
4788 }
4789 }
4790}
4791
4792impl operation_queue::Operation for Operation {
4793 fn lamport_timestamp(&self) -> clock::Lamport {
4794 match self {
4795 Operation::Buffer(_) => {
4796 unreachable!("buffer operations should never be deferred at this layer")
4797 }
4798 Operation::UpdateDiagnostics {
4799 lamport_timestamp, ..
4800 }
4801 | Operation::UpdateSelections {
4802 lamport_timestamp, ..
4803 }
4804 | Operation::UpdateCompletionTriggers {
4805 lamport_timestamp, ..
4806 } => *lamport_timestamp,
4807 }
4808 }
4809}
4810
4811impl Default for Diagnostic {
4812 fn default() -> Self {
4813 Self {
4814 source: Default::default(),
4815 source_kind: DiagnosticSourceKind::Other,
4816 code: None,
4817 code_description: None,
4818 severity: DiagnosticSeverity::ERROR,
4819 message: Default::default(),
4820 markdown: None,
4821 group_id: 0,
4822 is_primary: false,
4823 is_disk_based: false,
4824 is_unnecessary: false,
4825 underline: true,
4826 data: None,
4827 }
4828 }
4829}
4830
4831impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4833 pub fn spaces(len: u32) -> Self {
4834 Self {
4835 len,
4836 kind: IndentKind::Space,
4837 }
4838 }
4839
4840 /// Returns an [`IndentSize`] representing a tab.
4841 pub fn tab() -> Self {
4842 Self {
4843 len: 1,
4844 kind: IndentKind::Tab,
4845 }
4846 }
4847
4848 /// An iterator over the characters represented by this [`IndentSize`].
4849 pub fn chars(&self) -> impl Iterator<Item = char> {
4850 iter::repeat(self.char()).take(self.len as usize)
4851 }
4852
4853 /// The character representation of this [`IndentSize`].
4854 pub fn char(&self) -> char {
4855 match self.kind {
4856 IndentKind::Space => ' ',
4857 IndentKind::Tab => '\t',
4858 }
4859 }
4860
4861 /// Consumes the current [`IndentSize`] and returns a new one that has
4862 /// been shrunk or enlarged by the given size along the given direction.
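    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 4);
    /// ```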
4863 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4864 match direction {
4865 Ordering::Less => {
4866 if self.kind == size.kind && self.len >= size.len {
4867 self.len -= size.len;
4868 }
4869 }
4870 Ordering::Equal => {}
4871 Ordering::Greater => {
4872 if self.len == 0 {
4873 self = size;
4874 } else if self.kind == size.kind {
4875 self.len += size.len;
4876 }
4877 }
4878 }
4879 self
4880 }
4881
4882 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4883 match self.kind {
4884 IndentKind::Space => self.len as usize,
4885 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4886 }
4887 }
4888}
4889
4890#[cfg(any(test, feature = "test-support"))]
4891pub struct TestFile {
4892 pub path: Arc<Path>,
4893 pub root_name: String,
4894 pub local_root: Option<PathBuf>,
4895}
4896
4897#[cfg(any(test, feature = "test-support"))]
4898impl File for TestFile {
4899 fn path(&self) -> &Arc<Path> {
4900 &self.path
4901 }
4902
4903 fn full_path(&self, _: &gpui::App) -> PathBuf {
4904 PathBuf::from(&self.root_name).join(self.path.as_ref())
4905 }
4906
4907 fn as_local(&self) -> Option<&dyn LocalFile> {
4908 if self.local_root.is_some() {
4909 Some(self)
4910 } else {
4911 None
4912 }
4913 }
4914
4915 fn disk_state(&self) -> DiskState {
4916 unimplemented!()
4917 }
4918
4919 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4920 self.path().file_name().unwrap_or(self.root_name.as_ref())
4921 }
4922
4923 fn worktree_id(&self, _: &App) -> WorktreeId {
4924 WorktreeId::from_usize(0)
4925 }
4926
4927 fn to_proto(&self, _: &App) -> rpc::proto::File {
4928 unimplemented!()
4929 }
4930
4931 fn is_private(&self) -> bool {
4932 false
4933 }
4934}
4935
4936#[cfg(any(test, feature = "test-support"))]
4937impl LocalFile for TestFile {
4938 fn abs_path(&self, _cx: &App) -> PathBuf {
4939 PathBuf::from(self.local_root.as_ref().unwrap())
4940 .join(&self.root_name)
4941 .join(self.path.as_ref())
4942 }
4943
4944 fn load(&self, _cx: &App) -> Task<Result<String>> {
4945 unimplemented!()
4946 }
4947
4948 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4949 unimplemented!()
4950 }
4951}
4952
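/// Groups an ascending sequence of row numbers into contiguous ranges,
/// starting a new range whenever a gap appears or the current range reaches
/// `max_len`.
///
/// A minimal sketch (not compiled as a doctest, since this helper is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```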
4953pub(crate) fn contiguous_ranges(
4954 values: impl Iterator<Item = u32>,
4955 max_len: usize,
4956) -> impl Iterator<Item = Range<u32>> {
4957 let mut values = values;
4958 let mut current_range: Option<Range<u32>> = None;
4959 std::iter::from_fn(move || {
4960 loop {
4961 if let Some(value) = values.next() {
4962 if let Some(range) = &mut current_range {
4963 if value == range.end && range.len() < max_len {
4964 range.end += 1;
4965 continue;
4966 }
4967 }
4968
4969 let prev_range = current_range.clone();
4970 current_range = Some(value..(value + 1));
4971 if prev_range.is_some() {
4972 return prev_range;
4973 }
4974 } else {
4975 return current_range.take();
4976 }
4977 }
4978 })
4979}
4980
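/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-specific word characters into account.
///
/// A minimal sketch (not compiled as a doctest), using no language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```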
4981#[derive(Default, Debug)]
4982pub struct CharClassifier {
4983 scope: Option<LanguageScope>,
4984 for_completion: bool,
4985 ignore_punctuation: bool,
4986}
4987
4988impl CharClassifier {
4989 pub fn new(scope: Option<LanguageScope>) -> Self {
4990 Self {
4991 scope,
4992 for_completion: false,
4993 ignore_punctuation: false,
4994 }
4995 }
4996
4997 pub fn for_completion(self, for_completion: bool) -> Self {
4998 Self {
4999 for_completion,
5000 ..self
5001 }
5002 }
5003
5004 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5005 Self {
5006 ignore_punctuation,
5007 ..self
5008 }
5009 }
5010
5011 pub fn is_whitespace(&self, c: char) -> bool {
5012 self.kind(c) == CharKind::Whitespace
5013 }
5014
5015 pub fn is_word(&self, c: char) -> bool {
5016 self.kind(c) == CharKind::Word
5017 }
5018
5019 pub fn is_punctuation(&self, c: char) -> bool {
5020 self.kind(c) == CharKind::Punctuation
5021 }
5022
5023 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5024 if c.is_alphanumeric() || c == '_' {
5025 return CharKind::Word;
5026 }
5027
5028 if let Some(scope) = &self.scope {
5029 let characters = if self.for_completion {
5030 scope.completion_query_characters()
5031 } else {
5032 scope.word_characters()
5033 };
5034 if let Some(characters) = characters {
5035 if characters.contains(&c) {
5036 return CharKind::Word;
5037 }
5038 }
5039 }
5040
5041 if c.is_whitespace() {
5042 return CharKind::Whitespace;
5043 }
5044
5045 if ignore_punctuation {
5046 CharKind::Word
5047 } else {
5048 CharKind::Punctuation
5049 }
5050 }
5051
5052 pub fn kind(&self, c: char) -> CharKind {
5053 self.kind_with(c, self.ignore_punctuation)
5054 }
5055}
5056
5057/// Find all of the ranges of whitespace that occur at the ends of lines
5058/// in the given rope.
5059///
5060/// This could also be done with a regex search, but this implementation
5061/// avoids copying text.
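///
/// A minimal sketch (not compiled as a doctest), assuming `Rope` can be
/// constructed from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```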
5062pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5063 let mut ranges = Vec::new();
5064
5065 let mut offset = 0;
5066 let mut prev_chunk_trailing_whitespace_range = 0..0;
5067 for chunk in rope.chunks() {
5068 let mut prev_line_trailing_whitespace_range = 0..0;
5069 for (i, line) in chunk.split('\n').enumerate() {
5070 let line_end_offset = offset + line.len();
5071 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5072 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5073
5074 if i == 0 && trimmed_line_len == 0 {
5075 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5076 }
5077 if !prev_line_trailing_whitespace_range.is_empty() {
5078 ranges.push(prev_line_trailing_whitespace_range);
5079 }
5080
5081 offset = line_end_offset + 1;
5082 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5083 }
5084
5085 offset -= 1;
5086 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5087 }
5088
5089 if !prev_chunk_trailing_whitespace_range.is_empty() {
5090 ranges.push(prev_chunk_trailing_whitespace_range);
5091 }
5092
5093 ranges
5094}