1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoize calls to has_changes_since(saved_version).
126 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// The human-readable message (in markdown format)
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
232 /// Quick separation of diagnostics groups based by their source.
233 pub source_kind: DiagnosticSourceKind,
234 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
306 /// The buffer is in need of a reload
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
356 /// Return whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
395 /// Returns the absolute path of this file
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
422 /// If the original indent column is `a`, and the first line of insertion
423 /// is then auto-indented to column `b`, then every other line of
424 /// the insertion will be auto-indented to column `b - a`
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
497 /// Whether this chunk of text was originally a tab character.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region that could be resolved into a task
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
574 /// Returns the first line without leading whitespace unless highlighted
575 /// and a boolean indicating if there are more lines after
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 &syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
727 ¤t_snapshot.text,
728 ¤t_snapshot.syntax,
729 Some(deletion_highlight_style),
730 &syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 &syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 &syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation {
1162 if let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169 }
1170
1171 fn on_base_buffer_event(
1172 &mut self,
1173 _: Entity<Buffer>,
1174 event: &BufferEvent,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let BufferEvent::Operation { operation, .. } = event else {
1178 return;
1179 };
1180 let Some(BufferBranchState {
1181 merged_operations, ..
1182 }) = &mut self.branch_state
1183 else {
1184 return;
1185 };
1186
1187 let mut operation_to_undo = None;
1188 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1189 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1190 merged_operations.remove(ix);
1191 operation_to_undo = Some(operation.timestamp);
1192 }
1193 }
1194
1195 self.apply_ops([operation.clone()], cx);
1196
1197 if let Some(timestamp) = operation_to_undo {
1198 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1199 self.undo_operations(counts, cx);
1200 }
1201 }
1202
1203 #[cfg(test)]
1204 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1205 &self.text
1206 }
1207
1208 /// Retrieve a snapshot of the buffer's raw text, without any
1209 /// language-related state like the syntax tree or diagnostics.
1210 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1211 self.text.snapshot()
1212 }
1213
1214 /// The file associated with the buffer, if any.
1215 pub fn file(&self) -> Option<&Arc<dyn File>> {
1216 self.file.as_ref()
1217 }
1218
1219 /// The version of the buffer that was last saved or reloaded from disk.
1220 pub fn saved_version(&self) -> &clock::Global {
1221 &self.saved_version
1222 }
1223
1224 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1225 pub fn saved_mtime(&self) -> Option<MTime> {
1226 self.saved_mtime
1227 }
1228
1229 /// Assign a language to the buffer.
1230 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1231 self.non_text_state_update_count += 1;
1232 self.syntax_map.lock().clear(&self.text);
1233 self.language = language;
1234 self.was_changed();
1235 self.reparse(cx);
1236 cx.emit(BufferEvent::LanguageChanged);
1237 }
1238
1239 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1240 /// other languages if parts of the buffer are written in different languages.
1241 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1242 self.syntax_map
1243 .lock()
1244 .set_language_registry(language_registry);
1245 }
1246
1247 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1248 self.syntax_map.lock().language_registry()
1249 }
1250
1251 /// Assign the buffer a new [`Capability`].
1252 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1253 self.capability = capability;
1254 cx.emit(BufferEvent::CapabilityChanged)
1255 }
1256
1257 /// This method is called to signal that the buffer has been saved.
1258 pub fn did_save(
1259 &mut self,
1260 version: clock::Global,
1261 mtime: Option<MTime>,
1262 cx: &mut Context<Self>,
1263 ) {
1264 self.saved_version = version;
1265 self.has_unsaved_edits
1266 .set((self.saved_version().clone(), false));
1267 self.has_conflict = false;
1268 self.saved_mtime = mtime;
1269 self.was_changed();
1270 cx.emit(BufferEvent::Saved);
1271 cx.notify();
1272 }
1273
1274 /// This method is called to signal that the buffer has been discarded.
1275 pub fn discarded(&self, cx: &mut Context<Self>) {
1276 cx.emit(BufferEvent::Discarded);
1277 cx.notify();
1278 }
1279
1280 /// Reloads the contents of the buffer from disk.
1281 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1282 let (tx, rx) = futures::channel::oneshot::channel();
1283 let prev_version = self.text.version();
1284 self.reload_task = Some(cx.spawn(async move |this, cx| {
1285 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1286 let file = this.file.as_ref()?.as_local()?;
1287
1288 Some((file.disk_state().mtime(), file.load(cx)))
1289 })?
1290 else {
1291 return Ok(());
1292 };
1293
1294 let new_text = new_text.await?;
1295 let diff = this
1296 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1297 .await;
1298 this.update(cx, |this, cx| {
1299 if this.version() == diff.base_version {
1300 this.finalize_last_transaction();
1301 this.apply_diff(diff, cx);
1302 tx.send(this.finalize_last_transaction().cloned()).ok();
1303 this.has_conflict = false;
1304 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1305 } else {
1306 if !diff.edits.is_empty()
1307 || this
1308 .edits_since::<usize>(&diff.base_version)
1309 .next()
1310 .is_some()
1311 {
1312 this.has_conflict = true;
1313 }
1314
1315 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1316 }
1317
1318 this.reload_task.take();
1319 })
1320 }));
1321 rx
1322 }
1323
1324 /// This method is called to signal that the buffer has been reloaded.
1325 pub fn did_reload(
1326 &mut self,
1327 version: clock::Global,
1328 line_ending: LineEnding,
1329 mtime: Option<MTime>,
1330 cx: &mut Context<Self>,
1331 ) {
1332 self.saved_version = version;
1333 self.has_unsaved_edits
1334 .set((self.saved_version.clone(), false));
1335 self.text.set_line_ending(line_ending);
1336 self.saved_mtime = mtime;
1337 cx.emit(BufferEvent::Reloaded);
1338 cx.notify();
1339 }
1340
1341 /// Updates the [`File`] backing this buffer. This should be called when
1342 /// the file has changed or has been deleted.
1343 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1344 let was_dirty = self.is_dirty();
1345 let mut file_changed = false;
1346
1347 if let Some(old_file) = self.file.as_ref() {
1348 if new_file.path() != old_file.path() {
1349 file_changed = true;
1350 }
1351
1352 let old_state = old_file.disk_state();
1353 let new_state = new_file.disk_state();
1354 if old_state != new_state {
1355 file_changed = true;
1356 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1357 cx.emit(BufferEvent::ReloadNeeded)
1358 }
1359 }
1360 } else {
1361 file_changed = true;
1362 };
1363
1364 self.file = Some(new_file);
1365 if file_changed {
1366 self.was_changed();
1367 self.non_text_state_update_count += 1;
1368 if was_dirty != self.is_dirty() {
1369 cx.emit(BufferEvent::DirtyChanged);
1370 }
1371 cx.emit(BufferEvent::FileHandleChanged);
1372 cx.notify();
1373 }
1374 }
1375
1376 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1377 Some(self.branch_state.as_ref()?.base_buffer.clone())
1378 }
1379
1380 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1381 pub fn language(&self) -> Option<&Arc<Language>> {
1382 self.language.as_ref()
1383 }
1384
1385 /// Returns the [`Language`] at the given location.
1386 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1387 let offset = position.to_offset(self);
1388 let mut is_first = true;
1389 let start_anchor = self.anchor_before(offset);
1390 let end_anchor = self.anchor_after(offset);
1391 self.syntax_map
1392 .lock()
1393 .layers_for_range(offset..offset, &self.text, false)
1394 .filter(|layer| {
1395 if is_first {
1396 is_first = false;
1397 return true;
1398 }
1399 let any_sub_ranges_contain_range = layer
1400 .included_sub_ranges
1401 .map(|sub_ranges| {
1402 sub_ranges.iter().any(|sub_range| {
1403 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1404 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1405 !is_before_start && !is_after_end
1406 })
1407 })
1408 .unwrap_or(true);
1409 let result = any_sub_ranges_contain_range;
1410 return result;
1411 })
1412 .last()
1413 .map(|info| info.language.clone())
1414 .or_else(|| self.language.clone())
1415 }
1416
1417 /// Returns each [`Language`] for the active syntax layers at the given location.
1418 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1419 let offset = position.to_offset(self);
1420 let mut languages: Vec<Arc<Language>> = self
1421 .syntax_map
1422 .lock()
1423 .layers_for_range(offset..offset, &self.text, false)
1424 .map(|info| info.language.clone())
1425 .collect();
1426
1427 if languages.is_empty() {
1428 if let Some(buffer_language) = self.language() {
1429 languages.push(buffer_language.clone());
1430 }
1431 }
1432
1433 languages
1434 }
1435
1436 /// An integer version number that accounts for all updates besides
1437 /// the buffer's text itself (which is versioned via a version vector).
1438 pub fn non_text_state_update_count(&self) -> usize {
1439 self.non_text_state_update_count
1440 }
1441
1442 /// Whether the buffer is being parsed in the background.
1443 #[cfg(any(test, feature = "test-support"))]
1444 pub fn is_parsing(&self) -> bool {
1445 self.reparse.is_some()
1446 }
1447
1448 /// Indicates whether the buffer contains any regions that may be
1449 /// written in a language that hasn't been loaded yet.
1450 pub fn contains_unknown_injections(&self) -> bool {
1451 self.syntax_map.lock().contains_unknown_injections()
1452 }
1453
1454 #[cfg(any(test, feature = "test-support"))]
1455 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1456 self.sync_parse_timeout = timeout;
1457 }
1458
1459 /// Called after an edit to synchronize the buffer's main parse tree with
1460 /// the buffer's new underlying state.
1461 ///
1462 /// Locks the syntax map and interpolates the edits since the last reparse
1463 /// into the foreground syntax tree.
1464 ///
1465 /// Then takes a stable snapshot of the syntax map before unlocking it.
1466 /// The snapshot with the interpolated edits is sent to a background thread,
1467 /// where we ask Tree-sitter to perform an incremental parse.
1468 ///
1469 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1470 /// waiting on the parse to complete. As soon as it completes, we proceed
1471 /// synchronously, unless a 1ms timeout elapses.
1472 ///
1473 /// If we time out waiting on the parse, we spawn a second task waiting
1474 /// until the parse does complete and return with the interpolated tree still
1475 /// in the foreground. When the background parse completes, call back into
1476 /// the main thread and assign the foreground parse state.
1477 ///
1478 /// If the buffer or grammar changed since the start of the background parse,
1479 /// initiate an additional reparse recursively. To avoid concurrent parses
1480 /// for the same buffer, we only initiate a new parse if we are not already
1481 /// parsing in the background.
1482 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1483 if self.reparse.is_some() {
1484 return;
1485 }
1486 let language = if let Some(language) = self.language.clone() {
1487 language
1488 } else {
1489 return;
1490 };
1491
1492 let text = self.text_snapshot();
1493 let parsed_version = self.version();
1494
1495 let mut syntax_map = self.syntax_map.lock();
1496 syntax_map.interpolate(&text);
1497 let language_registry = syntax_map.language_registry();
1498 let mut syntax_snapshot = syntax_map.snapshot();
1499 drop(syntax_map);
1500
1501 let parse_task = cx.background_spawn({
1502 let language = language.clone();
1503 let language_registry = language_registry.clone();
1504 async move {
1505 syntax_snapshot.reparse(&text, language_registry, language);
1506 syntax_snapshot
1507 }
1508 });
1509
1510 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1511 match cx
1512 .background_executor()
1513 .block_with_timeout(self.sync_parse_timeout, parse_task)
1514 {
1515 Ok(new_syntax_snapshot) => {
1516 self.did_finish_parsing(new_syntax_snapshot, cx);
1517 self.reparse = None;
1518 }
1519 Err(parse_task) => {
1520 self.reparse = Some(cx.spawn(async move |this, cx| {
1521 let new_syntax_map = parse_task.await;
1522 this.update(cx, move |this, cx| {
1523 let grammar_changed =
1524 this.language.as_ref().map_or(true, |current_language| {
1525 !Arc::ptr_eq(&language, current_language)
1526 });
1527 let language_registry_changed = new_syntax_map
1528 .contains_unknown_injections()
1529 && language_registry.map_or(false, |registry| {
1530 registry.version() != new_syntax_map.language_registry_version()
1531 });
1532 let parse_again = language_registry_changed
1533 || grammar_changed
1534 || this.version.changed_since(&parsed_version);
1535 this.did_finish_parsing(new_syntax_map, cx);
1536 this.reparse = None;
1537 if parse_again {
1538 this.reparse(cx);
1539 }
1540 })
1541 .ok();
1542 }));
1543 }
1544 }
1545 }
1546
1547 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1548 self.was_changed();
1549 self.non_text_state_update_count += 1;
1550 self.syntax_map.lock().did_parse(syntax_snapshot);
1551 self.request_autoindent(cx);
1552 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1553 cx.emit(BufferEvent::Reparsed);
1554 cx.notify();
1555 }
1556
1557 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1558 self.parse_status.1.clone()
1559 }
1560
1561 /// Assign to the buffer a set of diagnostics created by a given language server.
1562 pub fn update_diagnostics(
1563 &mut self,
1564 server_id: LanguageServerId,
1565 diagnostics: DiagnosticSet,
1566 cx: &mut Context<Self>,
1567 ) {
1568 let lamport_timestamp = self.text.lamport_clock.tick();
1569 let op = Operation::UpdateDiagnostics {
1570 server_id,
1571 diagnostics: diagnostics.iter().cloned().collect(),
1572 lamport_timestamp,
1573 };
1574
1575 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1576 self.send_operation(op, true, cx);
1577 }
1578
1579 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1580 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1581 return None;
1582 };
1583 Some(&self.diagnostics[idx].1)
1584 }
1585
1586 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1587 if let Some(indent_sizes) = self.compute_autoindents() {
1588 let indent_sizes = cx.background_spawn(indent_sizes);
1589 match cx
1590 .background_executor()
1591 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1592 {
1593 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1594 Err(indent_sizes) => {
1595 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1596 let indent_sizes = indent_sizes.await;
1597 this.update(cx, |this, cx| {
1598 this.apply_autoindents(indent_sizes, cx);
1599 })
1600 .ok();
1601 }));
1602 }
1603 }
1604 } else {
1605 self.autoindent_requests.clear();
1606 for tx in self.wait_for_autoindent_txs.drain(..) {
1607 tx.send(()).ok();
1608 }
1609 }
1610 }
1611
1612 fn compute_autoindents(
1613 &self,
1614 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1615 let max_rows_between_yields = 100;
1616 let snapshot = self.snapshot();
1617 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1618 return None;
1619 }
1620
1621 let autoindent_requests = self.autoindent_requests.clone();
1622 Some(async move {
1623 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1624 for request in autoindent_requests {
1625 // Resolve each edited range to its row in the current buffer and in the
1626 // buffer before this batch of edits.
1627 let mut row_ranges = Vec::new();
1628 let mut old_to_new_rows = BTreeMap::new();
1629 let mut language_indent_sizes_by_new_row = Vec::new();
1630 for entry in &request.entries {
1631 let position = entry.range.start;
1632 let new_row = position.to_point(&snapshot).row;
1633 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1634 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1635
1636 if !entry.first_line_is_new {
1637 let old_row = position.to_point(&request.before_edit).row;
1638 old_to_new_rows.insert(old_row, new_row);
1639 }
1640 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1641 }
1642
1643 // Build a map containing the suggested indentation for each of the edited lines
1644 // with respect to the state of the buffer before these edits. This map is keyed
1645 // by the rows for these lines in the current state of the buffer.
1646 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1647 let old_edited_ranges =
1648 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1649 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1650 let mut language_indent_size = IndentSize::default();
1651 for old_edited_range in old_edited_ranges {
1652 let suggestions = request
1653 .before_edit
1654 .suggest_autoindents(old_edited_range.clone())
1655 .into_iter()
1656 .flatten();
1657 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1658 if let Some(suggestion) = suggestion {
1659 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1660
1661 // Find the indent size based on the language for this row.
1662 while let Some((row, size)) = language_indent_sizes.peek() {
1663 if *row > new_row {
1664 break;
1665 }
1666 language_indent_size = *size;
1667 language_indent_sizes.next();
1668 }
1669
1670 let suggested_indent = old_to_new_rows
1671 .get(&suggestion.basis_row)
1672 .and_then(|from_row| {
1673 Some(old_suggestions.get(from_row).copied()?.0)
1674 })
1675 .unwrap_or_else(|| {
1676 request
1677 .before_edit
1678 .indent_size_for_line(suggestion.basis_row)
1679 })
1680 .with_delta(suggestion.delta, language_indent_size);
1681 old_suggestions
1682 .insert(new_row, (suggested_indent, suggestion.within_error));
1683 }
1684 }
1685 yield_now().await;
1686 }
1687
1688 // Compute new suggestions for each line, but only include them in the result
1689 // if they differ from the old suggestion for that line.
1690 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1691 let mut language_indent_size = IndentSize::default();
1692 for (row_range, original_indent_column) in row_ranges {
1693 let new_edited_row_range = if request.is_block_mode {
1694 row_range.start..row_range.start + 1
1695 } else {
1696 row_range.clone()
1697 };
1698
1699 let suggestions = snapshot
1700 .suggest_autoindents(new_edited_row_range.clone())
1701 .into_iter()
1702 .flatten();
1703 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1704 if let Some(suggestion) = suggestion {
1705 // Find the indent size based on the language for this row.
1706 while let Some((row, size)) = language_indent_sizes.peek() {
1707 if *row > new_row {
1708 break;
1709 }
1710 language_indent_size = *size;
1711 language_indent_sizes.next();
1712 }
1713
1714 let suggested_indent = indent_sizes
1715 .get(&suggestion.basis_row)
1716 .copied()
1717 .map(|e| e.0)
1718 .unwrap_or_else(|| {
1719 snapshot.indent_size_for_line(suggestion.basis_row)
1720 })
1721 .with_delta(suggestion.delta, language_indent_size);
1722
1723 if old_suggestions.get(&new_row).map_or(
1724 true,
1725 |(old_indentation, was_within_error)| {
1726 suggested_indent != *old_indentation
1727 && (!suggestion.within_error || *was_within_error)
1728 },
1729 ) {
1730 indent_sizes.insert(
1731 new_row,
1732 (suggested_indent, request.ignore_empty_lines),
1733 );
1734 }
1735 }
1736 }
1737
1738 if let (true, Some(original_indent_column)) =
1739 (request.is_block_mode, original_indent_column)
1740 {
1741 let new_indent =
1742 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1743 *indent
1744 } else {
1745 snapshot.indent_size_for_line(row_range.start)
1746 };
1747 let delta = new_indent.len as i64 - original_indent_column as i64;
1748 if delta != 0 {
1749 for row in row_range.skip(1) {
1750 indent_sizes.entry(row).or_insert_with(|| {
1751 let mut size = snapshot.indent_size_for_line(row);
1752 if size.kind == new_indent.kind {
1753 match delta.cmp(&0) {
1754 Ordering::Greater => size.len += delta as u32,
1755 Ordering::Less => {
1756 size.len = size.len.saturating_sub(-delta as u32)
1757 }
1758 Ordering::Equal => {}
1759 }
1760 }
1761 (size, request.ignore_empty_lines)
1762 });
1763 }
1764 }
1765 }
1766
1767 yield_now().await;
1768 }
1769 }
1770
1771 indent_sizes
1772 .into_iter()
1773 .filter_map(|(row, (indent, ignore_empty_lines))| {
1774 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1775 None
1776 } else {
1777 Some((row, indent))
1778 }
1779 })
1780 .collect()
1781 })
1782 }
1783
1784 fn apply_autoindents(
1785 &mut self,
1786 indent_sizes: BTreeMap<u32, IndentSize>,
1787 cx: &mut Context<Self>,
1788 ) {
1789 self.autoindent_requests.clear();
1790 for tx in self.wait_for_autoindent_txs.drain(..) {
1791 tx.send(()).ok();
1792 }
1793
1794 let edits: Vec<_> = indent_sizes
1795 .into_iter()
1796 .filter_map(|(row, indent_size)| {
1797 let current_size = indent_size_for_line(self, row);
1798 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1799 })
1800 .collect();
1801
1802 let preserve_preview = self.preserve_preview();
1803 self.edit(edits, None, cx);
1804 if preserve_preview {
1805 self.refresh_preview();
1806 }
1807 }
1808
1809 /// Create a minimal edit that will cause the given row to be indented
1810 /// with the given size. After applying this edit, the length of the line
1811 /// will always be at least `new_size.len`.
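    ///
    /// An illustrative sketch (not compiled as a doc-test); the row and sizes are assumed values:
    ///
    /// ```ignore
    /// // Growing a two-space indent to four spaces inserts two spaces at column 0 of the row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```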
1812 pub fn edit_for_indent_size_adjustment(
1813 row: u32,
1814 current_size: IndentSize,
1815 new_size: IndentSize,
1816 ) -> Option<(Range<Point>, String)> {
1817 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1819 Ordering::Greater => {
1820 let point = Point::new(row, 0);
1821 Some((
1822 point..point,
1823 iter::repeat(new_size.char())
1824 .take((new_size.len - current_size.len) as usize)
1825 .collect::<String>(),
1826 ))
1827 }
1828
1829 Ordering::Less => Some((
1830 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1831 String::new(),
1832 )),
1833
1834 Ordering::Equal => None,
1835 }
1836 } else {
1837 Some((
1838 Point::new(row, 0)..Point::new(row, current_size.len),
1839 iter::repeat(new_size.char())
1840 .take(new_size.len as usize)
1841 .collect::<String>(),
1842 ))
1843 }
1844 }
1845
1846 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1847 /// and the given new text.
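    ///
    /// A hedged usage sketch (not compiled as a doc-test); it assumes an async context in which
    /// the returned [`Task`] can be awaited:
    ///
    /// ```ignore
    /// // Compute the diff in the background, then apply it; `apply_diff` rebases the hunks
    /// // if the buffer changed in the meantime and drops any conflicting hunks.
    /// let diff = buffer.diff(new_text, cx).await;
    /// buffer.apply_diff(diff, cx);
    /// ```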
1848 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1849 let old_text = self.as_rope().clone();
1850 let base_version = self.version();
1851 cx.background_executor()
1852 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1853 let old_text = old_text.to_string();
1854 let line_ending = LineEnding::detect(&new_text);
1855 LineEnding::normalize(&mut new_text);
1856 let edits = text_diff(&old_text, &new_text);
1857 Diff {
1858 base_version,
1859 line_ending,
1860 edits,
1861 }
1862 })
1863 }
1864
1865 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1867 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1868 let old_text = self.as_rope().clone();
1869 let line_ending = self.line_ending();
1870 let base_version = self.version();
1871 cx.background_spawn(async move {
1872 let ranges = trailing_whitespace_ranges(&old_text);
1873 let empty = Arc::<str>::from("");
1874 Diff {
1875 base_version,
1876 line_ending,
1877 edits: ranges
1878 .into_iter()
1879 .map(|range| (range, empty.clone()))
1880 .collect(),
1881 }
1882 })
1883 }
1884
1885 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
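    ///
    /// A small illustration (not compiled as a doc-test); the literal text is an assumption:
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n\n  ", cx);
    /// buffer.ensure_final_newline(cx);
    /// // Trailing blank lines and spaces are replaced by a single newline.
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```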
1887 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1888 let len = self.len();
1889 if len == 0 {
1890 return;
1891 }
1892 let mut offset = len;
1893 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1894 let non_whitespace_len = chunk
1895 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1896 .len();
1897 offset -= chunk.len();
1898 offset += non_whitespace_len;
1899 if non_whitespace_len != 0 {
1900 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1901 return;
1902 }
1903 break;
1904 }
1905 }
1906 self.edit([(offset..len, "\n")], None, cx);
1907 }
1908
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1912 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1913 let snapshot = self.snapshot();
1914 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1915 let mut delta = 0;
1916 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1917 while let Some(edit_since) = edits_since.peek() {
1918 // If the edit occurs after a diff hunk, then it does not
1919 // affect that hunk.
1920 if edit_since.old.start > range.end {
1921 break;
1922 }
1923 // If the edit precedes the diff hunk, then adjust the hunk
1924 // to reflect the edit.
1925 else if edit_since.old.end < range.start {
1926 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1927 edits_since.next();
1928 }
1929 // If the edit intersects a diff hunk, then discard that hunk.
1930 else {
1931 return None;
1932 }
1933 }
1934
1935 let start = (range.start as i64 + delta) as usize;
1936 let end = (range.end as i64 + delta) as usize;
1937 Some((start..end, new_text))
1938 });
1939
1940 self.start_transaction();
1941 self.text.set_line_ending(diff.line_ending);
1942 self.edit(adjusted_edits, None, cx);
1943 self.end_transaction(cx)
1944 }
1945
1946 fn has_unsaved_edits(&self) -> bool {
1947 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1948
1949 if last_version == self.version {
1950 self.has_unsaved_edits
1951 .set((last_version, has_unsaved_edits));
1952 return has_unsaved_edits;
1953 }
1954
1955 let has_edits = self.has_edits_since(&self.saved_version);
1956 self.has_unsaved_edits
1957 .set((self.version.clone(), has_edits));
1958 has_edits
1959 }
1960
1961 /// Checks if the buffer has unsaved changes.
1962 pub fn is_dirty(&self) -> bool {
1963 if self.capability == Capability::ReadOnly {
1964 return false;
1965 }
1966 if self.has_conflict {
1967 return true;
1968 }
1969 match self.file.as_ref().map(|f| f.disk_state()) {
1970 Some(DiskState::New) | Some(DiskState::Deleted) => {
1971 !self.is_empty() && self.has_unsaved_edits()
1972 }
1973 _ => self.has_unsaved_edits(),
1974 }
1975 }
1976
1977 /// Checks if the buffer and its file have both changed since the buffer
1978 /// was last saved or reloaded.
1979 pub fn has_conflict(&self) -> bool {
1980 if self.has_conflict {
1981 return true;
1982 }
1983 let Some(file) = self.file.as_ref() else {
1984 return false;
1985 };
1986 match file.disk_state() {
1987 DiskState::New => false,
1988 DiskState::Present { mtime } => match self.saved_mtime {
1989 Some(saved_mtime) => {
1990 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1991 }
1992 None => true,
1993 },
1994 DiskState::Deleted => false,
1995 }
1996 }
1997
1998 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1999 pub fn subscribe(&mut self) -> Subscription {
2000 self.text.subscribe()
2001 }
2002
2003 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2004 ///
2005 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
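    ///
    /// A minimal sketch of the intended usage (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// // The bit is flipped synchronously as part of the edit, no effect cycle required.
    /// assert!(changed.get());
    /// ```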
2007 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2008 if let Err(ix) = self
2009 .change_bits
2010 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2011 {
2012 self.change_bits.insert(ix, bit);
2013 }
2014 }
2015
2016 fn was_changed(&mut self) {
2017 self.change_bits.retain(|change_bit| {
2018 change_bit.upgrade().map_or(false, |bit| {
2019 bit.replace(true);
2020 true
2021 })
2022 });
2023 }
2024
2025 /// Starts a transaction, if one is not already in-progress. When undoing or
2026 /// redoing edits, all of the edits performed within a transaction are undone
2027 /// or redone together.
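    ///
    /// A hedged sketch of how transactions group edits for undo (not compiled as a doc-test):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo reverts both edits, because they were made inside one transaction.
    /// buffer.undo(cx);
    /// ```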
2028 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2029 self.start_transaction_at(Instant::now())
2030 }
2031
2032 /// Starts a transaction, providing the current time. Subsequent transactions
2033 /// that occur within a short period of time will be grouped together. This
2034 /// is controlled by the buffer's undo grouping duration.
2035 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2036 self.transaction_depth += 1;
2037 if self.was_dirty_before_starting_transaction.is_none() {
2038 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2039 }
2040 self.text.start_transaction_at(now)
2041 }
2042
2043 /// Terminates the current transaction, if this is the outermost transaction.
2044 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2045 self.end_transaction_at(Instant::now(), cx)
2046 }
2047
2048 /// Terminates the current transaction, providing the current time. Subsequent transactions
2049 /// that occur within a short period of time will be grouped together. This
2050 /// is controlled by the buffer's undo grouping duration.
2051 pub fn end_transaction_at(
2052 &mut self,
2053 now: Instant,
2054 cx: &mut Context<Self>,
2055 ) -> Option<TransactionId> {
2056 assert!(self.transaction_depth > 0);
2057 self.transaction_depth -= 1;
2058 let was_dirty = if self.transaction_depth == 0 {
2059 self.was_dirty_before_starting_transaction.take().unwrap()
2060 } else {
2061 false
2062 };
2063 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2064 self.did_edit(&start_version, was_dirty, cx);
2065 Some(transaction_id)
2066 } else {
2067 None
2068 }
2069 }
2070
2071 /// Manually add a transaction to the buffer's undo history.
2072 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2073 self.text.push_transaction(transaction, now);
2074 }
2075
2076 /// Differs from `push_transaction` in that it does not clear the redo
2077 /// stack. Intended to be used to create a parent transaction to merge
2078 /// potential child transactions into.
2079 ///
2080 /// The caller is responsible for removing it from the undo history using
2081 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2082 /// are merged into this transaction, the caller is responsible for ensuring
2083 /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
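    ///
    /// A sketch of that pattern (not compiled as a doc-test); `child` stands for an optional
    /// transaction id produced by a later `start_transaction`/`end_transaction` pair:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ... later, after producing `child` with start_transaction/end_transaction ...
    /// match child {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         // Nothing was merged, so drop the empty parent from the undo history.
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```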
2087 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2088 self.text.push_empty_transaction(now)
2089 }
2090
2091 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2093 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2094 self.text.finalize_last_transaction()
2095 }
2096
2097 /// Manually group all changes since a given transaction.
2098 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2099 self.text.group_until_transaction(transaction_id);
2100 }
2101
    /// Manually remove a transaction from the buffer's undo history.
2103 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2104 self.text.forget_transaction(transaction_id)
2105 }
2106
    /// Retrieve a transaction from the buffer's undo history.
2108 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2109 self.text.get_transaction(transaction_id)
2110 }
2111
2112 /// Manually merge two transactions in the buffer's undo history.
2113 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2114 self.text.merge_transactions(transaction, destination);
2115 }
2116
2117 /// Waits for the buffer to receive operations with the given timestamps.
2118 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2119 &mut self,
2120 edit_ids: It,
2121 ) -> impl Future<Output = Result<()>> + use<It> {
2122 self.text.wait_for_edits(edit_ids)
2123 }
2124
2125 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2126 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2127 &mut self,
2128 anchors: It,
2129 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2130 self.text.wait_for_anchors(anchors)
2131 }
2132
2133 /// Waits for the buffer to receive operations up to the given version.
2134 pub fn wait_for_version(
2135 &mut self,
2136 version: clock::Global,
2137 ) -> impl Future<Output = Result<()>> + use<> {
2138 self.text.wait_for_version(version)
2139 }
2140
2141 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2143 pub fn give_up_waiting(&mut self) {
2144 self.text.give_up_waiting();
2145 }
2146
2147 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2148 let mut rx = None;
2149 if !self.autoindent_requests.is_empty() {
2150 let channel = oneshot::channel();
2151 self.wait_for_autoindent_txs.push(channel.0);
2152 rx = Some(channel.1);
2153 }
2154 rx
2155 }
2156
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2158 pub fn set_active_selections(
2159 &mut self,
2160 selections: Arc<[Selection<Anchor>]>,
2161 line_mode: bool,
2162 cursor_shape: CursorShape,
2163 cx: &mut Context<Self>,
2164 ) {
2165 let lamport_timestamp = self.text.lamport_clock.tick();
2166 self.remote_selections.insert(
2167 self.text.replica_id(),
2168 SelectionSet {
2169 selections: selections.clone(),
2170 lamport_timestamp,
2171 line_mode,
2172 cursor_shape,
2173 },
2174 );
2175 self.send_operation(
2176 Operation::UpdateSelections {
2177 selections,
2178 line_mode,
2179 lamport_timestamp,
2180 cursor_shape,
2181 },
2182 true,
2183 cx,
2184 );
2185 self.non_text_state_update_count += 1;
2186 cx.notify();
2187 }
2188
2189 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2190 /// this replica.
2191 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2192 if self
2193 .remote_selections
2194 .get(&self.text.replica_id())
2195 .map_or(true, |set| !set.selections.is_empty())
2196 {
2197 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2198 }
2199 }
2200
2201 pub fn set_agent_selections(
2202 &mut self,
2203 selections: Arc<[Selection<Anchor>]>,
2204 line_mode: bool,
2205 cursor_shape: CursorShape,
2206 cx: &mut Context<Self>,
2207 ) {
2208 let lamport_timestamp = self.text.lamport_clock.tick();
2209 self.remote_selections.insert(
2210 AGENT_REPLICA_ID,
2211 SelectionSet {
2212 selections: selections.clone(),
2213 lamport_timestamp,
2214 line_mode,
2215 cursor_shape,
2216 },
2217 );
2218 self.non_text_state_update_count += 1;
2219 cx.notify();
2220 }
2221
2222 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2223 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2224 }
2225
2226 /// Replaces the buffer's entire text.
2227 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2228 where
2229 T: Into<Arc<str>>,
2230 {
2231 self.autoindent_requests.clear();
2232 self.edit([(0..self.len(), text)], None, cx)
2233 }
2234
2235 /// Appends the given text to the end of the buffer.
2236 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2237 where
2238 T: Into<Arc<str>>,
2239 {
2240 self.edit([(self.len()..self.len(), text)], None, cx)
2241 }
2242
2243 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2244 /// delete, and a string of text to insert at that location.
2245 ///
2246 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2247 /// request for the edited ranges, which will be processed when the buffer finishes
2248 /// parsing.
2249 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
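    ///
    /// A hedged example of the shape of the arguments (not compiled as a doc-test); the
    /// ranges and text are assumptions:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         // Replace the first five characters.
    ///         (0..5, "hello"),
    ///         // Insert at offset 10 without deleting anything.
    ///         (10..10, "\n    world"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```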
2252 pub fn edit<I, S, T>(
2253 &mut self,
2254 edits_iter: I,
2255 autoindent_mode: Option<AutoindentMode>,
2256 cx: &mut Context<Self>,
2257 ) -> Option<clock::Lamport>
2258 where
2259 I: IntoIterator<Item = (Range<S>, T)>,
2260 S: ToOffset,
2261 T: Into<Arc<str>>,
2262 {
2263 // Skip invalid edits and coalesce contiguous ones.
2264 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2265
2266 for (range, new_text) in edits_iter {
2267 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2268
2269 if range.start > range.end {
2270 mem::swap(&mut range.start, &mut range.end);
2271 }
2272 let new_text = new_text.into();
2273 if !new_text.is_empty() || !range.is_empty() {
2274 if let Some((prev_range, prev_text)) = edits.last_mut() {
2275 if prev_range.end >= range.start {
2276 prev_range.end = cmp::max(prev_range.end, range.end);
2277 *prev_text = format!("{prev_text}{new_text}").into();
2278 } else {
2279 edits.push((range, new_text));
2280 }
2281 } else {
2282 edits.push((range, new_text));
2283 }
2284 }
2285 }
2286 if edits.is_empty() {
2287 return None;
2288 }
2289
2290 self.start_transaction();
2291 self.pending_autoindent.take();
2292 let autoindent_request = autoindent_mode
2293 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2294
2295 let edit_operation = self.text.edit(edits.iter().cloned());
2296 let edit_id = edit_operation.timestamp();
2297
2298 if let Some((before_edit, mode)) = autoindent_request {
2299 let mut delta = 0isize;
2300 let entries = edits
2301 .into_iter()
2302 .enumerate()
2303 .zip(&edit_operation.as_edit().unwrap().new_text)
2304 .map(|((ix, (range, _)), new_text)| {
2305 let new_text_length = new_text.len();
2306 let old_start = range.start.to_point(&before_edit);
2307 let new_start = (delta + range.start as isize) as usize;
2308 let range_len = range.end - range.start;
2309 delta += new_text_length as isize - range_len as isize;
2310
2311 // Decide what range of the insertion to auto-indent, and whether
2312 // the first line of the insertion should be considered a newly-inserted line
2313 // or an edit to an existing line.
2314 let mut range_of_insertion_to_indent = 0..new_text_length;
2315 let mut first_line_is_new = true;
2316
2317 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2318 let old_line_end = before_edit.line_len(old_start.row);
2319
2320 if old_start.column > old_line_start {
2321 first_line_is_new = false;
2322 }
2323
2324 if !new_text.contains('\n')
2325 && (old_start.column + (range_len as u32) < old_line_end
2326 || old_line_end == old_line_start)
2327 {
2328 first_line_is_new = false;
2329 }
2330
2331 // When inserting text starting with a newline, avoid auto-indenting the
2332 // previous line.
2333 if new_text.starts_with('\n') {
2334 range_of_insertion_to_indent.start += 1;
2335 first_line_is_new = true;
2336 }
2337
2338 let mut original_indent_column = None;
2339 if let AutoindentMode::Block {
2340 original_indent_columns,
2341 } = &mode
2342 {
2343 original_indent_column = Some(if new_text.starts_with('\n') {
2344 indent_size_for_text(
2345 new_text[range_of_insertion_to_indent.clone()].chars(),
2346 )
2347 .len
2348 } else {
2349 original_indent_columns
2350 .get(ix)
2351 .copied()
2352 .flatten()
2353 .unwrap_or_else(|| {
2354 indent_size_for_text(
2355 new_text[range_of_insertion_to_indent.clone()].chars(),
2356 )
2357 .len
2358 })
2359 });
2360
2361 // Avoid auto-indenting the line after the edit.
2362 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2363 range_of_insertion_to_indent.end -= 1;
2364 }
2365 }
2366
2367 AutoindentRequestEntry {
2368 first_line_is_new,
2369 original_indent_column,
2370 indent_size: before_edit.language_indent_size_at(range.start, cx),
2371 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2372 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2373 }
2374 })
2375 .collect();
2376
2377 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2378 before_edit,
2379 entries,
2380 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2381 ignore_empty_lines: false,
2382 }));
2383 }
2384
2385 self.end_transaction(cx);
2386 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2387 Some(edit_id)
2388 }
2389
2390 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2391 self.was_changed();
2392
2393 if self.edits_since::<usize>(old_version).next().is_none() {
2394 return;
2395 }
2396
2397 self.reparse(cx);
2398 cx.emit(BufferEvent::Edited);
2399 if was_dirty != self.is_dirty() {
2400 cx.emit(BufferEvent::DirtyChanged);
2401 }
2402 cx.notify();
2403 }
2404
2405 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2406 where
2407 I: IntoIterator<Item = Range<T>>,
2408 T: ToOffset + Copy,
2409 {
2410 let before_edit = self.snapshot();
2411 let entries = ranges
2412 .into_iter()
2413 .map(|range| AutoindentRequestEntry {
2414 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2415 first_line_is_new: true,
2416 indent_size: before_edit.language_indent_size_at(range.start, cx),
2417 original_indent_column: None,
2418 })
2419 .collect();
2420 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2421 before_edit,
2422 entries,
2423 is_block_mode: false,
2424 ignore_empty_lines: true,
2425 }));
2426 self.request_autoindent(cx);
2427 }
2428
2429 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2430 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2431 pub fn insert_empty_line(
2432 &mut self,
2433 position: impl ToPoint,
2434 space_above: bool,
2435 space_below: bool,
2436 cx: &mut Context<Self>,
2437 ) -> Point {
2438 let mut position = position.to_point(self);
2439
2440 self.start_transaction();
2441
2442 self.edit(
2443 [(position..position, "\n")],
2444 Some(AutoindentMode::EachLine),
2445 cx,
2446 );
2447
2448 if position.column > 0 {
2449 position += Point::new(1, 0);
2450 }
2451
2452 if !self.is_line_blank(position.row) {
2453 self.edit(
2454 [(position..position, "\n")],
2455 Some(AutoindentMode::EachLine),
2456 cx,
2457 );
2458 }
2459
2460 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2461 self.edit(
2462 [(position..position, "\n")],
2463 Some(AutoindentMode::EachLine),
2464 cx,
2465 );
2466 position.row += 1;
2467 }
2468
2469 if space_below
2470 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2471 {
2472 self.edit(
2473 [(position..position, "\n")],
2474 Some(AutoindentMode::EachLine),
2475 cx,
2476 );
2477 }
2478
2479 self.end_transaction(cx);
2480
2481 position
2482 }
2483
2484 /// Applies the given remote operations to the buffer.
2485 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2486 self.pending_autoindent.take();
2487 let was_dirty = self.is_dirty();
2488 let old_version = self.version.clone();
2489 let mut deferred_ops = Vec::new();
2490 let buffer_ops = ops
2491 .into_iter()
2492 .filter_map(|op| match op {
2493 Operation::Buffer(op) => Some(op),
2494 _ => {
2495 if self.can_apply_op(&op) {
2496 self.apply_op(op, cx);
2497 } else {
2498 deferred_ops.push(op);
2499 }
2500 None
2501 }
2502 })
2503 .collect::<Vec<_>>();
2504 for operation in buffer_ops.iter() {
2505 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2506 }
2507 self.text.apply_ops(buffer_ops);
2508 self.deferred_ops.insert(deferred_ops);
2509 self.flush_deferred_ops(cx);
2510 self.did_edit(&old_version, was_dirty, cx);
2511 // Notify independently of whether the buffer was edited as the operations could include a
2512 // selection update.
2513 cx.notify();
2514 }
2515
2516 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2517 let mut deferred_ops = Vec::new();
2518 for op in self.deferred_ops.drain().iter().cloned() {
2519 if self.can_apply_op(&op) {
2520 self.apply_op(op, cx);
2521 } else {
2522 deferred_ops.push(op);
2523 }
2524 }
2525 self.deferred_ops.insert(deferred_ops);
2526 }
2527
2528 pub fn has_deferred_ops(&self) -> bool {
2529 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2530 }
2531
2532 fn can_apply_op(&self, operation: &Operation) -> bool {
2533 match operation {
2534 Operation::Buffer(_) => {
2535 unreachable!("buffer operations should never be applied at this layer")
2536 }
2537 Operation::UpdateDiagnostics {
2538 diagnostics: diagnostic_set,
2539 ..
2540 } => diagnostic_set.iter().all(|diagnostic| {
2541 self.text.can_resolve(&diagnostic.range.start)
2542 && self.text.can_resolve(&diagnostic.range.end)
2543 }),
2544 Operation::UpdateSelections { selections, .. } => selections
2545 .iter()
2546 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2547 Operation::UpdateCompletionTriggers { .. } => true,
2548 }
2549 }
2550
2551 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2552 match operation {
2553 Operation::Buffer(_) => {
2554 unreachable!("buffer operations should never be applied at this layer")
2555 }
2556 Operation::UpdateDiagnostics {
2557 server_id,
2558 diagnostics: diagnostic_set,
2559 lamport_timestamp,
2560 } => {
2561 let snapshot = self.snapshot();
2562 self.apply_diagnostic_update(
2563 server_id,
2564 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2565 lamport_timestamp,
2566 cx,
2567 );
2568 }
2569 Operation::UpdateSelections {
2570 selections,
2571 lamport_timestamp,
2572 line_mode,
2573 cursor_shape,
2574 } => {
2575 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2576 if set.lamport_timestamp > lamport_timestamp {
2577 return;
2578 }
2579 }
2580
2581 self.remote_selections.insert(
2582 lamport_timestamp.replica_id,
2583 SelectionSet {
2584 selections,
2585 lamport_timestamp,
2586 line_mode,
2587 cursor_shape,
2588 },
2589 );
2590 self.text.lamport_clock.observe(lamport_timestamp);
2591 self.non_text_state_update_count += 1;
2592 }
2593 Operation::UpdateCompletionTriggers {
2594 triggers,
2595 lamport_timestamp,
2596 server_id,
2597 } => {
2598 if triggers.is_empty() {
2599 self.completion_triggers_per_language_server
2600 .remove(&server_id);
2601 self.completion_triggers = self
2602 .completion_triggers_per_language_server
2603 .values()
2604 .flat_map(|triggers| triggers.into_iter().cloned())
2605 .collect();
2606 } else {
2607 self.completion_triggers_per_language_server
2608 .insert(server_id, triggers.iter().cloned().collect());
2609 self.completion_triggers.extend(triggers);
2610 }
2611 self.text.lamport_clock.observe(lamport_timestamp);
2612 }
2613 }
2614 }
2615
2616 fn apply_diagnostic_update(
2617 &mut self,
2618 server_id: LanguageServerId,
2619 diagnostics: DiagnosticSet,
2620 lamport_timestamp: clock::Lamport,
2621 cx: &mut Context<Self>,
2622 ) {
2623 if lamport_timestamp > self.diagnostics_timestamp {
2624 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2625 if diagnostics.is_empty() {
2626 if let Ok(ix) = ix {
2627 self.diagnostics.remove(ix);
2628 }
2629 } else {
2630 match ix {
2631 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2632 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2633 };
2634 }
2635 self.diagnostics_timestamp = lamport_timestamp;
2636 self.non_text_state_update_count += 1;
2637 self.text.lamport_clock.observe(lamport_timestamp);
2638 cx.notify();
2639 cx.emit(BufferEvent::DiagnosticsUpdated);
2640 }
2641 }
2642
2643 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2644 self.was_changed();
2645 cx.emit(BufferEvent::Operation {
2646 operation,
2647 is_local,
2648 });
2649 }
2650
2651 /// Removes the selections for a given peer.
2652 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2653 self.remote_selections.remove(&replica_id);
2654 cx.notify();
2655 }
2656
2657 /// Undoes the most recent transaction.
2658 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2659 let was_dirty = self.is_dirty();
2660 let old_version = self.version.clone();
2661
2662 if let Some((transaction_id, operation)) = self.text.undo() {
2663 self.send_operation(Operation::Buffer(operation), true, cx);
2664 self.did_edit(&old_version, was_dirty, cx);
2665 Some(transaction_id)
2666 } else {
2667 None
2668 }
2669 }
2670
2671 /// Manually undoes a specific transaction in the buffer's undo history.
2672 pub fn undo_transaction(
2673 &mut self,
2674 transaction_id: TransactionId,
2675 cx: &mut Context<Self>,
2676 ) -> bool {
2677 let was_dirty = self.is_dirty();
2678 let old_version = self.version.clone();
2679 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2680 self.send_operation(Operation::Buffer(operation), true, cx);
2681 self.did_edit(&old_version, was_dirty, cx);
2682 true
2683 } else {
2684 false
2685 }
2686 }
2687
2688 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2689 pub fn undo_to_transaction(
2690 &mut self,
2691 transaction_id: TransactionId,
2692 cx: &mut Context<Self>,
2693 ) -> bool {
2694 let was_dirty = self.is_dirty();
2695 let old_version = self.version.clone();
2696
2697 let operations = self.text.undo_to_transaction(transaction_id);
2698 let undone = !operations.is_empty();
2699 for operation in operations {
2700 self.send_operation(Operation::Buffer(operation), true, cx);
2701 }
2702 if undone {
2703 self.did_edit(&old_version, was_dirty, cx)
2704 }
2705 undone
2706 }
2707
2708 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2709 let was_dirty = self.is_dirty();
2710 let operation = self.text.undo_operations(counts);
2711 let old_version = self.version.clone();
2712 self.send_operation(Operation::Buffer(operation), true, cx);
2713 self.did_edit(&old_version, was_dirty, cx);
2714 }
2715
    /// Redoes the most recently undone transaction.
2717 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2718 let was_dirty = self.is_dirty();
2719 let old_version = self.version.clone();
2720
2721 if let Some((transaction_id, operation)) = self.text.redo() {
2722 self.send_operation(Operation::Buffer(operation), true, cx);
2723 self.did_edit(&old_version, was_dirty, cx);
2724 Some(transaction_id)
2725 } else {
2726 None
2727 }
2728 }
2729
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2731 pub fn redo_to_transaction(
2732 &mut self,
2733 transaction_id: TransactionId,
2734 cx: &mut Context<Self>,
2735 ) -> bool {
2736 let was_dirty = self.is_dirty();
2737 let old_version = self.version.clone();
2738
2739 let operations = self.text.redo_to_transaction(transaction_id);
2740 let redone = !operations.is_empty();
2741 for operation in operations {
2742 self.send_operation(Operation::Buffer(operation), true, cx);
2743 }
2744 if redone {
2745 self.did_edit(&old_version, was_dirty, cx)
2746 }
2747 redone
2748 }
2749
2750 /// Override current completion triggers with the user-provided completion triggers.
2751 pub fn set_completion_triggers(
2752 &mut self,
2753 server_id: LanguageServerId,
2754 triggers: BTreeSet<String>,
2755 cx: &mut Context<Self>,
2756 ) {
2757 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2758 if triggers.is_empty() {
2759 self.completion_triggers_per_language_server
2760 .remove(&server_id);
2761 self.completion_triggers = self
2762 .completion_triggers_per_language_server
2763 .values()
2764 .flat_map(|triggers| triggers.into_iter().cloned())
2765 .collect();
2766 } else {
2767 self.completion_triggers_per_language_server
2768 .insert(server_id, triggers.clone());
2769 self.completion_triggers.extend(triggers.iter().cloned());
2770 }
2771 self.send_operation(
2772 Operation::UpdateCompletionTriggers {
2773 triggers: triggers.into_iter().collect(),
2774 lamport_timestamp: self.completion_triggers_timestamp,
2775 server_id,
2776 },
2777 true,
2778 cx,
2779 );
2780 cx.notify();
2781 }
2782
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2785 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2786 &self.completion_triggers
2787 }
2788
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `preserve_preview`
    /// to return true until there are additional edits.
2792 pub fn refresh_preview(&mut self) {
2793 self.preview_version = self.version.clone();
2794 }
2795
2796 /// Whether we should preserve the preview status of a tab containing this buffer.
2797 pub fn preserve_preview(&self) -> bool {
2798 !self.has_edits_since(&self.preview_version)
2799 }
2800}
2801
2802#[doc(hidden)]
2803#[cfg(any(test, feature = "test-support"))]
2804impl Buffer {
2805 pub fn edit_via_marked_text(
2806 &mut self,
2807 marked_string: &str,
2808 autoindent_mode: Option<AutoindentMode>,
2809 cx: &mut Context<Self>,
2810 ) {
2811 let edits = self.edits_for_marked_text(marked_string);
2812 self.edit(edits, autoindent_mode, cx);
2813 }
2814
2815 pub fn set_group_interval(&mut self, group_interval: Duration) {
2816 self.text.set_group_interval(group_interval);
2817 }
2818
2819 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2820 where
2821 T: rand::Rng,
2822 {
2823 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2824 let mut last_end = None;
2825 for _ in 0..old_range_count {
2826 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2827 break;
2828 }
2829
2830 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2831 let mut range = self.random_byte_range(new_start, rng);
2832 if rng.gen_bool(0.2) {
2833 mem::swap(&mut range.start, &mut range.end);
2834 }
2835 last_end = Some(range.end);
2836
2837 let new_text_len = rng.gen_range(0..10);
2838 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2839 new_text = new_text.to_uppercase();
2840
2841 edits.push((range, new_text));
2842 }
2843 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2844 self.edit(edits, None, cx);
2845 }
2846
2847 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2848 let was_dirty = self.is_dirty();
2849 let old_version = self.version.clone();
2850
2851 let ops = self.text.randomly_undo_redo(rng);
2852 if !ops.is_empty() {
2853 for op in ops {
2854 self.send_operation(Operation::Buffer(op), true, cx);
2855 self.did_edit(&old_version, was_dirty, cx);
2856 }
2857 }
2858 }
2859}
2860
2861impl EventEmitter<BufferEvent> for Buffer {}
2862
2863impl Deref for Buffer {
2864 type Target = TextBuffer;
2865
2866 fn deref(&self) -> &Self::Target {
2867 &self.text
2868 }
2869}
2870
2871impl BufferSnapshot {
    /// Returns the [`IndentSize`] for a given line, measured from the line's
    /// actual leading whitespace.
2874 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2875 indent_size_for_line(self, row)
2876 }
2877
2878 /// Returns [`IndentSize`] for a given position that respects user settings
2879 /// and language preferences.
2880 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2881 let settings = language_settings(
2882 self.language_at(position).map(|l| l.name()),
2883 self.file(),
2884 cx,
2885 );
2886 if settings.hard_tabs {
2887 IndentSize::tab()
2888 } else {
2889 IndentSize::spaces(settings.tab_size.get())
2890 }
2891 }
2892
2893 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2894 /// is passed in as `single_indent_size`.
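    ///
    /// A hedged sketch (not compiled as a doc-test); the row range and indent unit are assumed:
    ///
    /// ```ignore
    /// // Suggest indentation for the first five rows, using four spaces as one indent level.
    /// let suggestions: BTreeMap<u32, IndentSize> =
    ///     snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// ```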
2895 pub fn suggested_indents(
2896 &self,
2897 rows: impl Iterator<Item = u32>,
2898 single_indent_size: IndentSize,
2899 ) -> BTreeMap<u32, IndentSize> {
2900 let mut result = BTreeMap::new();
2901
2902 for row_range in contiguous_ranges(rows, 10) {
2903 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2904 Some(suggestions) => suggestions,
2905 _ => break,
2906 };
2907
2908 for (row, suggestion) in row_range.zip(suggestions) {
2909 let indent_size = if let Some(suggestion) = suggestion {
2910 result
2911 .get(&suggestion.basis_row)
2912 .copied()
2913 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2914 .with_delta(suggestion.delta, single_indent_size)
2915 } else {
2916 self.indent_size_for_line(row)
2917 };
2918
2919 result.insert(row, indent_size);
2920 }
2921 }
2922
2923 result
2924 }
2925
2926 fn suggest_autoindents(
2927 &self,
2928 row_range: Range<u32>,
2929 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2930 let config = &self.language.as_ref()?.config;
2931 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2932
2933 #[derive(Debug, Clone)]
2934 struct StartPosition {
2935 start: Point,
2936 suffix: SharedString,
2937 }
2938
2939 // Find the suggested indentation ranges based on the syntax tree.
2940 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2941 let end = Point::new(row_range.end, 0);
2942 let range = (start..end).to_offset(&self.text);
2943 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2944 Some(&grammar.indents_config.as_ref()?.query)
2945 });
2946 let indent_configs = matches
2947 .grammars()
2948 .iter()
2949 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2950 .collect::<Vec<_>>();
2951
2952 let mut indent_ranges = Vec::<Range<Point>>::new();
2953 let mut start_positions = Vec::<StartPosition>::new();
2954 let mut outdent_positions = Vec::<Point>::new();
2955 while let Some(mat) = matches.peek() {
2956 let mut start: Option<Point> = None;
2957 let mut end: Option<Point> = None;
2958
2959 let config = indent_configs[mat.grammar_index];
2960 for capture in mat.captures {
2961 if capture.index == config.indent_capture_ix {
2962 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2963 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2964 } else if Some(capture.index) == config.start_capture_ix {
2965 start = Some(Point::from_ts_point(capture.node.end_position()));
2966 } else if Some(capture.index) == config.end_capture_ix {
2967 end = Some(Point::from_ts_point(capture.node.start_position()));
2968 } else if Some(capture.index) == config.outdent_capture_ix {
2969 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2970 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2971 start_positions.push(StartPosition {
2972 start: Point::from_ts_point(capture.node.start_position()),
2973 suffix: suffix.clone(),
2974 });
2975 }
2976 }
2977
2978 matches.advance();
2979 if let Some((start, end)) = start.zip(end) {
2980 if start.row == end.row {
2981 continue;
2982 }
2983 let range = start..end;
2984 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2985 Err(ix) => indent_ranges.insert(ix, range),
2986 Ok(ix) => {
2987 let prev_range = &mut indent_ranges[ix];
2988 prev_range.end = prev_range.end.max(range.end);
2989 }
2990 }
2991 }
2992 }
2993
2994 let mut error_ranges = Vec::<Range<Point>>::new();
2995 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2996 grammar.error_query.as_ref()
2997 });
2998 while let Some(mat) = matches.peek() {
2999 let node = mat.captures[0].node;
3000 let start = Point::from_ts_point(node.start_position());
3001 let end = Point::from_ts_point(node.end_position());
3002 let range = start..end;
3003 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3004 Ok(ix) | Err(ix) => ix,
3005 };
3006 let mut end_ix = ix;
3007 while let Some(existing_range) = error_ranges.get(end_ix) {
3008 if existing_range.end < end {
3009 end_ix += 1;
3010 } else {
3011 break;
3012 }
3013 }
3014 error_ranges.splice(ix..end_ix, [range]);
3015 matches.advance();
3016 }
3017
3018 outdent_positions.sort();
3019 for outdent_position in outdent_positions {
3020 // find the innermost indent range containing this outdent_position
3021 // set its end to the outdent position
3022 if let Some(range_to_truncate) = indent_ranges
3023 .iter_mut()
3024 .filter(|indent_range| indent_range.contains(&outdent_position))
3025 .next_back()
3026 {
3027 range_to_truncate.end = outdent_position;
3028 }
3029 }
3030
3031 start_positions.sort_by_key(|b| b.start);
3032
        // Find the suggested indentation increases and decreases based on regexes.
3034 let mut regex_outdent_map = HashMap::default();
3035 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3036 let mut start_positions_iter = start_positions.iter().peekable();
3037
3038 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3039 self.for_each_line(
3040 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3041 ..Point::new(row_range.end, 0),
3042 |row, line| {
3043 if config
3044 .decrease_indent_pattern
3045 .as_ref()
3046 .map_or(false, |regex| regex.is_match(line))
3047 {
3048 indent_change_rows.push((row, Ordering::Less));
3049 }
3050 if config
3051 .increase_indent_pattern
3052 .as_ref()
3053 .map_or(false, |regex| regex.is_match(line))
3054 {
3055 indent_change_rows.push((row + 1, Ordering::Greater));
3056 }
3057 while let Some(pos) = start_positions_iter.peek() {
3058 if pos.start.row < row {
3059 let pos = start_positions_iter.next().unwrap();
3060 last_seen_suffix
3061 .entry(pos.suffix.to_string())
3062 .or_default()
3063 .push(pos.start);
3064 } else {
3065 break;
3066 }
3067 }
3068 for rule in &config.decrease_indent_patterns {
3069 if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) {
3070 let row_start_column = self.indent_size_for_line(row).len;
3071 let basis_row = rule
3072 .valid_after
3073 .iter()
3074 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3075 .flatten()
3076 .filter(|start_point| start_point.column <= row_start_column)
3077 .max_by_key(|start_point| start_point.row);
3078 if let Some(outdent_to_row) = basis_row {
3079 regex_outdent_map.insert(row, outdent_to_row.row);
3080 }
3081 break;
3082 }
3083 }
3084 },
3085 );
3086
3087 let mut indent_changes = indent_change_rows.into_iter().peekable();
3088 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3089 prev_non_blank_row.unwrap_or(0)
3090 } else {
3091 row_range.start.saturating_sub(1)
3092 };
3093
3094 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3095 Some(row_range.map(move |row| {
3096 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3097
3098 let mut indent_from_prev_row = false;
3099 let mut outdent_from_prev_row = false;
3100 let mut outdent_to_row = u32::MAX;
3101 let mut from_regex = false;
3102
3103 while let Some((indent_row, delta)) = indent_changes.peek() {
3104 match indent_row.cmp(&row) {
3105 Ordering::Equal => match delta {
3106 Ordering::Less => {
3107 from_regex = true;
3108 outdent_from_prev_row = true
3109 }
3110 Ordering::Greater => {
3111 indent_from_prev_row = true;
3112 from_regex = true
3113 }
3114 _ => {}
3115 },
3116
3117 Ordering::Greater => break,
3118 Ordering::Less => {}
3119 }
3120
3121 indent_changes.next();
3122 }
3123
3124 for range in &indent_ranges {
3125 if range.start.row >= row {
3126 break;
3127 }
3128 if range.start.row == prev_row && range.end > row_start {
3129 indent_from_prev_row = true;
3130 }
3131 if range.end > prev_row_start && range.end <= row_start {
3132 outdent_to_row = outdent_to_row.min(range.start.row);
3133 }
3134 }
3135
3136 if let Some(basis_row) = regex_outdent_map.get(&row) {
3137 indent_from_prev_row = false;
3138 outdent_to_row = *basis_row;
3139 from_regex = true;
3140 }
3141
3142 let within_error = error_ranges
3143 .iter()
3144 .any(|e| e.start.row < row && e.end > row_start);
3145
3146 let suggestion = if outdent_to_row == prev_row
3147 || (outdent_from_prev_row && indent_from_prev_row)
3148 {
3149 Some(IndentSuggestion {
3150 basis_row: prev_row,
3151 delta: Ordering::Equal,
3152 within_error: within_error && !from_regex,
3153 })
3154 } else if indent_from_prev_row {
3155 Some(IndentSuggestion {
3156 basis_row: prev_row,
3157 delta: Ordering::Greater,
3158 within_error: within_error && !from_regex,
3159 })
3160 } else if outdent_to_row < prev_row {
3161 Some(IndentSuggestion {
3162 basis_row: outdent_to_row,
3163 delta: Ordering::Equal,
3164 within_error: within_error && !from_regex,
3165 })
3166 } else if outdent_from_prev_row {
3167 Some(IndentSuggestion {
3168 basis_row: prev_row,
3169 delta: Ordering::Less,
3170 within_error: within_error && !from_regex,
3171 })
3172 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3173 {
3174 Some(IndentSuggestion {
3175 basis_row: prev_row,
3176 delta: Ordering::Equal,
3177 within_error: within_error && !from_regex,
3178 })
3179 } else {
3180 None
3181 };
3182
3183 prev_row = row;
3184 prev_row_start = row_start;
3185 suggestion
3186 }))
3187 }
3188
3189 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3190 while row > 0 {
3191 row -= 1;
3192 if !self.is_line_blank(row) {
3193 return Some(row);
3194 }
3195 }
3196 None
3197 }
3198
3199 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3200 let captures = self.syntax.captures(range, &self.text, |grammar| {
3201 grammar.highlights_query.as_ref()
3202 });
3203 let highlight_maps = captures
3204 .grammars()
3205 .iter()
3206 .map(|grammar| grammar.highlight_map())
3207 .collect();
3208 (captures, highlight_maps)
3209 }
3210
    /// Iterates over chunks of text in the given range of the buffer. Text is chunked
    /// in an arbitrary way due to being stored in a [`Rope`](text::Rope), but each returned
    /// chunk has a single syntax highlighting style and diagnostic status.
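    ///
    /// A hedged sketch (not compiled as a doc-test); it assumes each chunk exposes its text
    /// slice as `chunk.text`:
    ///
    /// ```ignore
    /// let mut highlighted_len = 0;
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries one highlight style and diagnostic status for all of its text.
    ///     highlighted_len += chunk.text.len();
    /// }
    /// assert_eq!(highlighted_len, snapshot.len());
    /// ```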
3215 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3216 let range = range.start.to_offset(self)..range.end.to_offset(self);
3217
3218 let mut syntax = None;
3219 if language_aware {
3220 syntax = Some(self.get_highlights(range.clone()));
3221 }
3222 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3223 let diagnostics = language_aware;
3224 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3225 }
3226
3227 pub fn highlighted_text_for_range<T: ToOffset>(
3228 &self,
3229 range: Range<T>,
3230 override_style: Option<HighlightStyle>,
3231 syntax_theme: &SyntaxTheme,
3232 ) -> HighlightedText {
3233 HighlightedText::from_buffer_range(
3234 range,
3235 &self.text,
3236 &self.syntax,
3237 override_style,
3238 syntax_theme,
3239 )
3240 }
3241
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3244 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3245 let mut line = String::new();
3246 let mut row = range.start.row;
3247 for chunk in self
3248 .as_rope()
3249 .chunks_in_range(range.to_offset(self))
3250 .chain(["\n"])
3251 {
3252 for (newline_ix, text) in chunk.split('\n').enumerate() {
3253 if newline_ix > 0 {
3254 callback(row, &line);
3255 row += 1;
3256 line.clear();
3257 }
3258 line.push_str(text);
3259 }
3260 }
3261 }
3262
3263 /// Iterates over every [`SyntaxLayer`] in the buffer.
3264 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3265 self.syntax
3266 .layers_for_range(0..self.len(), &self.text, true)
3267 }
3268
3269 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3270 let offset = position.to_offset(self);
3271 self.syntax
3272 .layers_for_range(offset..offset, &self.text, false)
3273 .filter(|l| l.node().end_byte() > offset)
3274 .last()
3275 }
3276
3277 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3278 &self,
3279 range: Range<D>,
3280 ) -> Option<SyntaxLayer<'_>> {
3281 let range = range.to_offset(self);
3282 return self
3283 .syntax
3284 .layers_for_range(range, &self.text, false)
3285 .max_by(|a, b| {
3286 if a.depth != b.depth {
3287 a.depth.cmp(&b.depth)
3288 } else if a.offset.0 != b.offset.0 {
3289 a.offset.0.cmp(&b.offset.0)
3290 } else {
3291 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3292 }
3293 });
3294 }
3295
3296 /// Returns the main [`Language`].
3297 pub fn language(&self) -> Option<&Arc<Language>> {
3298 self.language.as_ref()
3299 }
3300
3301 /// Returns the [`Language`] at the given location.
3302 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3303 self.syntax_layer_at(position)
3304 .map(|info| info.language)
3305 .or(self.language.as_ref())
3306 }
3307
3308 /// Returns the settings for the language at the given location.
3309 pub fn settings_at<'a, D: ToOffset>(
3310 &'a self,
3311 position: D,
3312 cx: &'a App,
3313 ) -> Cow<'a, LanguageSettings> {
3314 language_settings(
3315 self.language_at(position).map(|l| l.name()),
3316 self.file.as_ref(),
3317 cx,
3318 )
3319 }
3320
3321 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3322 CharClassifier::new(self.language_scope_at(point))
3323 }
3324
3325 /// Returns the [`LanguageScope`] at the given location.
3326 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3327 let offset = position.to_offset(self);
3328 let mut scope = None;
3329 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3330
3331 // Use the layer that has the smallest node intersecting the given point.
3332 for layer in self
3333 .syntax
3334 .layers_for_range(offset..offset, &self.text, false)
3335 {
3336 let mut cursor = layer.node().walk();
3337
3338 let mut range = None;
3339 loop {
3340 let child_range = cursor.node().byte_range();
3341 if !child_range.contains(&offset) {
3342 break;
3343 }
3344
3345 range = Some(child_range);
3346 if cursor.goto_first_child_for_byte(offset).is_none() {
3347 break;
3348 }
3349 }
3350
3351 if let Some(range) = range {
3352 if smallest_range_and_depth.as_ref().map_or(
3353 true,
3354 |(smallest_range, smallest_range_depth)| {
3355 if layer.depth > *smallest_range_depth {
3356 true
3357 } else if layer.depth == *smallest_range_depth {
3358 range.len() < smallest_range.len()
3359 } else {
3360 false
3361 }
3362 },
3363 ) {
3364 smallest_range_and_depth = Some((range, layer.depth));
3365 scope = Some(LanguageScope {
3366 language: layer.language.clone(),
3367 override_id: layer.override_id(offset, &self.text),
3368 });
3369 }
3370 }
3371 }
3372
3373 scope.or_else(|| {
3374 self.language.clone().map(|language| LanguageScope {
3375 language,
3376 override_id: None,
3377 })
3378 })
3379 }
3380
3381 /// Returns a tuple of the range and character kind of the word
3382 /// surrounding the given position.
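    ///
    /// An illustrative sketch (not compiled as a doc-test); the buffer text and offset are
    /// assumed values:
    ///
    /// ```ignore
    /// // With the buffer text "let foo_bar = 1;" and an offset inside "foo_bar":
    /// let (range, kind) = snapshot.surrounding_word(6, false);
    /// assert_eq!(&snapshot.text()[range], "foo_bar");
    /// ```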
3383 pub fn surrounding_word<T: ToOffset>(
3384 &self,
3385 start: T,
3386 for_completion: bool,
3387 ) -> (Range<usize>, Option<CharKind>) {
3388 let mut start = start.to_offset(self);
3389 let mut end = start;
3390 let mut next_chars = self.chars_at(start).take(128).peekable();
3391 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3392
3393 let classifier = self
3394 .char_classifier_at(start)
3395 .for_completion(for_completion);
3396 let word_kind = cmp::max(
3397 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3398 next_chars.peek().copied().map(|c| classifier.kind(c)),
3399 );
3400
3401 for ch in prev_chars {
3402 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3403 start -= ch.len_utf8();
3404 } else {
3405 break;
3406 }
3407 }
3408
3409 for ch in next_chars {
3410 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3411 end += ch.len_utf8();
3412 } else {
3413 break;
3414 }
3415 }
3416
3417 (start..end, word_kind)
3418 }
3419
3420 /// Returns the closest syntax node enclosing the given range.
3421 pub fn syntax_ancestor<'a, T: ToOffset>(
3422 &'a self,
3423 range: Range<T>,
3424 ) -> Option<tree_sitter::Node<'a>> {
3425 let range = range.start.to_offset(self)..range.end.to_offset(self);
3426 let mut result: Option<tree_sitter::Node<'a>> = None;
3427 'outer: for layer in self
3428 .syntax
3429 .layers_for_range(range.clone(), &self.text, true)
3430 {
3431 let mut cursor = layer.node().walk();
3432
3433 // Descend to the first leaf that touches the start of the range.
3434 //
3435 // If the range is non-empty and the current node ends exactly at the start,
3436 // move to the next sibling to find a node that extends beyond the start.
3437 //
3438 // If the range is empty and the current node starts after the range position,
3439 // move to the previous sibling to find the node that contains the position.
3440 while cursor.goto_first_child_for_byte(range.start).is_some() {
3441 if !range.is_empty() && cursor.node().end_byte() == range.start {
3442 cursor.goto_next_sibling();
3443 }
3444 if range.is_empty() && cursor.node().start_byte() > range.start {
3445 cursor.goto_previous_sibling();
3446 }
3447 }
3448
3449 // Ascend to the smallest ancestor that strictly contains the range.
3450 loop {
3451 let node_range = cursor.node().byte_range();
3452 if node_range.start <= range.start
3453 && node_range.end >= range.end
3454 && node_range.len() > range.len()
3455 {
3456 break;
3457 }
3458 if !cursor.goto_parent() {
3459 continue 'outer;
3460 }
3461 }
3462
3463 let left_node = cursor.node();
3464 let mut layer_result = left_node;
3465
3466 // For an empty range, try to find another node immediately to the right of the range.
3467 if left_node.end_byte() == range.start {
3468 let mut right_node = None;
3469 while !cursor.goto_next_sibling() {
3470 if !cursor.goto_parent() {
3471 break;
3472 }
3473 }
3474
3475 while cursor.node().start_byte() == range.start {
3476 right_node = Some(cursor.node());
3477 if !cursor.goto_first_child() {
3478 break;
3479 }
3480 }
3481
3482 // If there is a candidate node on both sides of the (empty) range, then
3483 // decide between the two by favoring a named node over an anonymous token.
3484 // If both nodes are the same in that regard, favor the right one.
3485 if let Some(right_node) = right_node {
3486 if right_node.is_named() || !left_node.is_named() {
3487 layer_result = right_node;
3488 }
3489 }
3490 }
3491
3492 if let Some(previous_result) = &result {
3493 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3494 continue;
3495 }
3496 }
3497 result = Some(layer_result);
3498 }
3499
3500 result
3501 }
3502
    /// Returns the root syntax node within the row containing the given position.
3504 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3505 let start_offset = position.to_offset(self);
3506
3507 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3508
3509 let layer = self
3510 .syntax
3511 .layers_for_range(start_offset..start_offset, &self.text, true)
3512 .next()?;
3513
3514 let mut cursor = layer.node().walk();
3515
3516 // Descend to the first leaf that touches the start of the range.
3517 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3518 if cursor.node().end_byte() == start_offset {
3519 cursor.goto_next_sibling();
3520 }
3521 }
3522
3523 // Ascend to the root node within the same row.
3524 while cursor.goto_parent() {
3525 if cursor.node().start_position().row != row {
3526 break;
3527 }
3528 }
3529
        Some(cursor.node())
3531 }
3532
3533 /// Returns the outline for the buffer.
3534 ///
3535 /// This method allows passing an optional [`SyntaxTheme`] to
3536 /// syntax-highlight the returned symbols.
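    ///
    /// A hedged sketch of typical usage (assumes an existing `snapshot: BufferSnapshot`,
    /// and that `Outline` exposes its `items` with their `depth` and `text` fields):
    ///
    /// ```ignore
    /// // Build the outline without syntax highlighting and print each symbol,
    /// // indented by its depth in the containment hierarchy.
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```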
3537 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3538 self.outline_items_containing(0..self.len(), true, theme)
3539 .map(Outline::new)
3540 }
3541
3542 /// Returns all the symbols that contain the given position.
3543 ///
3544 /// This method allows passing an optional [`SyntaxTheme`] to
3545 /// syntax-highlight the returned symbols.
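    ///
    /// Sketch (illustrative; assumes `snapshot: BufferSnapshot` and a byte offset `offset`):
    ///
    /// ```ignore
    /// // Build a breadcrumb such as "mod buffer > impl BufferSnapshot > fn outline".
    /// if let Some(symbols) = snapshot.symbols_containing(offset, None) {
    ///     let breadcrumb = symbols
    ///         .iter()
    ///         .map(|item| item.text.as_str())
    ///         .collect::<Vec<_>>()
    ///         .join(" > ");
    ///     println!("{breadcrumb}");
    /// }
    /// ```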
3546 pub fn symbols_containing<T: ToOffset>(
3547 &self,
3548 position: T,
3549 theme: Option<&SyntaxTheme>,
3550 ) -> Option<Vec<OutlineItem<Anchor>>> {
3551 let position = position.to_offset(self);
3552 let mut items = self.outline_items_containing(
3553 position.saturating_sub(1)..self.len().min(position + 1),
3554 false,
3555 theme,
3556 )?;
3557 let mut prev_depth = None;
3558 items.retain(|item| {
3559 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3560 prev_depth = Some(item.depth);
3561 result
3562 });
3563 Some(items)
3564 }
3565
3566 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3567 let range = range.to_offset(self);
3568 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3569 grammar.outline_config.as_ref().map(|c| &c.query)
3570 });
3571 let configs = matches
3572 .grammars()
3573 .iter()
3574 .map(|g| g.outline_config.as_ref().unwrap())
3575 .collect::<Vec<_>>();
3576
3577 while let Some(mat) = matches.peek() {
3578 let config = &configs[mat.grammar_index];
3579 let containing_item_node = maybe!({
3580 let item_node = mat.captures.iter().find_map(|cap| {
3581 if cap.index == config.item_capture_ix {
3582 Some(cap.node)
3583 } else {
3584 None
3585 }
3586 })?;
3587
3588 let item_byte_range = item_node.byte_range();
3589 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3590 None
3591 } else {
3592 Some(item_node)
3593 }
3594 });
3595
3596 if let Some(item_node) = containing_item_node {
3597 return Some(
3598 Point::from_ts_point(item_node.start_position())
3599 ..Point::from_ts_point(item_node.end_position()),
3600 );
3601 }
3602
3603 matches.advance();
3604 }
3605 None
3606 }
3607
3608 pub fn outline_items_containing<T: ToOffset>(
3609 &self,
3610 range: Range<T>,
3611 include_extra_context: bool,
3612 theme: Option<&SyntaxTheme>,
3613 ) -> Option<Vec<OutlineItem<Anchor>>> {
3614 let range = range.to_offset(self);
3615 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3616 grammar.outline_config.as_ref().map(|c| &c.query)
3617 });
3618 let configs = matches
3619 .grammars()
3620 .iter()
3621 .map(|g| g.outline_config.as_ref().unwrap())
3622 .collect::<Vec<_>>();
3623
3624 let mut items = Vec::new();
3625 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3626 while let Some(mat) = matches.peek() {
3627 let config = &configs[mat.grammar_index];
3628 if let Some(item) =
3629 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3630 {
3631 items.push(item);
3632 } else if let Some(capture) = mat
3633 .captures
3634 .iter()
3635 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3636 {
3637 let capture_range = capture.node.start_position()..capture.node.end_position();
3638 let mut capture_row_range =
3639 capture_range.start.row as u32..capture_range.end.row as u32;
3640 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3641 {
3642 capture_row_range.end -= 1;
3643 }
3644 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3645 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3646 last_row_range.end = capture_row_range.end;
3647 } else {
3648 annotation_row_ranges.push(capture_row_range);
3649 }
3650 } else {
3651 annotation_row_ranges.push(capture_row_range);
3652 }
3653 }
3654 matches.advance();
3655 }
3656
3657 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3658
3659 // Assign depths based on containment relationships and convert to anchors.
3660 let mut item_ends_stack = Vec::<Point>::new();
3661 let mut anchor_items = Vec::new();
3662 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3663 for item in items {
3664 while let Some(last_end) = item_ends_stack.last().copied() {
3665 if last_end < item.range.end {
3666 item_ends_stack.pop();
3667 } else {
3668 break;
3669 }
3670 }
3671
3672 let mut annotation_row_range = None;
3673 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3674 let row_preceding_item = item.range.start.row.saturating_sub(1);
3675 if next_annotation_row_range.end < row_preceding_item {
3676 annotation_row_ranges.next();
3677 } else {
3678 if next_annotation_row_range.end == row_preceding_item {
3679 annotation_row_range = Some(next_annotation_row_range.clone());
3680 annotation_row_ranges.next();
3681 }
3682 break;
3683 }
3684 }
3685
3686 anchor_items.push(OutlineItem {
3687 depth: item_ends_stack.len(),
3688 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3689 text: item.text,
3690 highlight_ranges: item.highlight_ranges,
3691 name_ranges: item.name_ranges,
3692 body_range: item.body_range.map(|body_range| {
3693 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3694 }),
3695 annotation_range: annotation_row_range.map(|annotation_range| {
3696 self.anchor_after(Point::new(annotation_range.start, 0))
3697 ..self.anchor_before(Point::new(
3698 annotation_range.end,
3699 self.line_len(annotation_range.end),
3700 ))
3701 }),
3702 });
3703 item_ends_stack.push(item.range.end);
3704 }
3705
3706 Some(anchor_items)
3707 }
3708
3709 fn next_outline_item(
3710 &self,
3711 config: &OutlineConfig,
3712 mat: &SyntaxMapMatch,
3713 range: &Range<usize>,
3714 include_extra_context: bool,
3715 theme: Option<&SyntaxTheme>,
3716 ) -> Option<OutlineItem<Point>> {
3717 let item_node = mat.captures.iter().find_map(|cap| {
3718 if cap.index == config.item_capture_ix {
3719 Some(cap.node)
3720 } else {
3721 None
3722 }
3723 })?;
3724
3725 let item_byte_range = item_node.byte_range();
3726 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3727 return None;
3728 }
3729 let item_point_range = Point::from_ts_point(item_node.start_position())
3730 ..Point::from_ts_point(item_node.end_position());
3731
3732 let mut open_point = None;
3733 let mut close_point = None;
3734 let mut buffer_ranges = Vec::new();
3735 for capture in mat.captures {
3736 let node_is_name;
3737 if capture.index == config.name_capture_ix {
3738 node_is_name = true;
3739 } else if Some(capture.index) == config.context_capture_ix
3740 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3741 {
3742 node_is_name = false;
3743 } else {
3744 if Some(capture.index) == config.open_capture_ix {
3745 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3746 } else if Some(capture.index) == config.close_capture_ix {
3747 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3748 }
3749
3750 continue;
3751 }
3752
3753 let mut range = capture.node.start_byte()..capture.node.end_byte();
3754 let start = capture.node.start_position();
3755 if capture.node.end_position().row > start.row {
3756 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3757 }
3758
3759 if !range.is_empty() {
3760 buffer_ranges.push((range, node_is_name));
3761 }
3762 }
3763 if buffer_ranges.is_empty() {
3764 return None;
3765 }
3766 let mut text = String::new();
3767 let mut highlight_ranges = Vec::new();
3768 let mut name_ranges = Vec::new();
3769 let mut chunks = self.chunks(
3770 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3771 true,
3772 );
3773 let mut last_buffer_range_end = 0;
3774
3775 for (buffer_range, is_name) in buffer_ranges {
3776 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3777 if space_added {
3778 text.push(' ');
3779 }
3780 let before_append_len = text.len();
3781 let mut offset = buffer_range.start;
3782 chunks.seek(buffer_range.clone());
3783 for mut chunk in chunks.by_ref() {
3784 if chunk.text.len() > buffer_range.end - offset {
3785 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3786 offset = buffer_range.end;
3787 } else {
3788 offset += chunk.text.len();
3789 }
3790 let style = chunk
3791 .syntax_highlight_id
3792 .zip(theme)
3793 .and_then(|(highlight, theme)| highlight.style(theme));
3794 if let Some(style) = style {
3795 let start = text.len();
3796 let end = start + chunk.text.len();
3797 highlight_ranges.push((start..end, style));
3798 }
3799 text.push_str(chunk.text);
3800 if offset >= buffer_range.end {
3801 break;
3802 }
3803 }
3804 if is_name {
3805 let after_append_len = text.len();
3806 let start = if space_added && !name_ranges.is_empty() {
3807 before_append_len - 1
3808 } else {
3809 before_append_len
3810 };
3811 name_ranges.push(start..after_append_len);
3812 }
3813 last_buffer_range_end = buffer_range.end;
3814 }
3815
3816 Some(OutlineItem {
3817 depth: 0, // We'll calculate the depth later
3818 range: item_point_range,
3819 text,
3820 highlight_ranges,
3821 name_ranges,
3822 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3823 annotation_range: None,
3824 })
3825 }
3826
3827 pub fn function_body_fold_ranges<T: ToOffset>(
3828 &self,
3829 within: Range<T>,
3830 ) -> impl Iterator<Item = Range<usize>> + '_ {
3831 self.text_object_ranges(within, TreeSitterOptions::default())
3832 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3833 }
3834
3835 /// For each grammar in the language, runs the provided
3836 /// [`tree_sitter::Query`] against the given range.
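    ///
    /// A hedged example of the callback shape, mirroring how this method is used
    /// elsewhere in this file (assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Run each grammar's outline query over the first kilobyte of the buffer.
    /// let mut matches = snapshot.matches(0..1024.min(snapshot.len()), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     println!("matched pattern {} with {} captures", mat.pattern_index, mat.captures.len());
    ///     matches.advance();
    /// }
    /// ```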
3837 pub fn matches(
3838 &self,
3839 range: Range<usize>,
3840 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3841 ) -> SyntaxMapMatches<'_> {
3842 self.syntax.matches(range, self, query)
3843 }
3844
3845 pub fn all_bracket_ranges(
3846 &self,
3847 range: Range<usize>,
3848 ) -> impl Iterator<Item = BracketMatch> + '_ {
3849 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3850 grammar.brackets_config.as_ref().map(|c| &c.query)
3851 });
3852 let configs = matches
3853 .grammars()
3854 .iter()
3855 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3856 .collect::<Vec<_>>();
3857
3858 iter::from_fn(move || {
3859 while let Some(mat) = matches.peek() {
3860 let mut open = None;
3861 let mut close = None;
3862 let config = &configs[mat.grammar_index];
3863 let pattern = &config.patterns[mat.pattern_index];
3864 for capture in mat.captures {
3865 if capture.index == config.open_capture_ix {
3866 open = Some(capture.node.byte_range());
3867 } else if capture.index == config.close_capture_ix {
3868 close = Some(capture.node.byte_range());
3869 }
3870 }
3871
3872 matches.advance();
3873
3874 let Some((open_range, close_range)) = open.zip(close) else {
3875 continue;
3876 };
3877
3878 let bracket_range = open_range.start..=close_range.end;
3879 if !bracket_range.overlaps(&range) {
3880 continue;
3881 }
3882
3883 return Some(BracketMatch {
3884 open_range,
3885 close_range,
3886 newline_only: pattern.newline_only,
3887 });
3888 }
3889 None
3890 })
3891 }
3892
    /// Returns bracket range pairs overlapping or adjacent to `range`.
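    ///
    /// Illustrative sketch (assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Gather the open/close byte ranges of all bracket pairs around a cursor position.
    /// let cursor = 42_usize;
    /// for pair in snapshot.bracket_ranges(cursor..cursor) {
    ///     // e.g. highlight `pair.open_range` and `pair.close_range` in the editor.
    ///     println!("{:?} .. {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```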
3894 pub fn bracket_ranges<T: ToOffset>(
3895 &self,
3896 range: Range<T>,
3897 ) -> impl Iterator<Item = BracketMatch> + '_ {
3898 // Find bracket pairs that *inclusively* contain the given range.
3899 let range = range.start.to_offset(self).saturating_sub(1)
3900 ..self.len().min(range.end.to_offset(self) + 1);
3901 self.all_bracket_ranges(range)
3902 .filter(|pair| !pair.newline_only)
3903 }
3904
3905 pub fn debug_variables_query<T: ToOffset>(
3906 &self,
3907 range: Range<T>,
3908 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3909 let range = range.start.to_offset(self).saturating_sub(1)
3910 ..self.len().min(range.end.to_offset(self) + 1);
3911
3912 let mut matches = self.syntax.matches_with_options(
3913 range.clone(),
3914 &self.text,
3915 TreeSitterOptions::default(),
3916 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3917 );
3918
3919 let configs = matches
3920 .grammars()
3921 .iter()
3922 .map(|grammar| grammar.debug_variables_config.as_ref())
3923 .collect::<Vec<_>>();
3924
3925 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3926
3927 iter::from_fn(move || {
3928 loop {
3929 while let Some(capture) = captures.pop() {
3930 if capture.0.overlaps(&range) {
3931 return Some(capture);
3932 }
3933 }
3934
3935 let mat = matches.peek()?;
3936
3937 let Some(config) = configs[mat.grammar_index].as_ref() else {
3938 matches.advance();
3939 continue;
3940 };
3941
3942 for capture in mat.captures {
3943 let Some(ix) = config
3944 .objects_by_capture_ix
3945 .binary_search_by_key(&capture.index, |e| e.0)
3946 .ok()
3947 else {
3948 continue;
3949 };
3950 let text_object = config.objects_by_capture_ix[ix].1;
3951 let byte_range = capture.node.byte_range();
3952
3953 let mut found = false;
3954 for (range, existing) in captures.iter_mut() {
3955 if existing == &text_object {
3956 range.start = range.start.min(byte_range.start);
3957 range.end = range.end.max(byte_range.end);
3958 found = true;
3959 break;
3960 }
3961 }
3962
3963 if !found {
3964 captures.push((byte_range, text_object));
3965 }
3966 }
3967
3968 matches.advance();
3969 }
3970 })
3971 }
3972
3973 pub fn text_object_ranges<T: ToOffset>(
3974 &self,
3975 range: Range<T>,
3976 options: TreeSitterOptions,
3977 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3978 let range = range.start.to_offset(self).saturating_sub(1)
3979 ..self.len().min(range.end.to_offset(self) + 1);
3980
3981 let mut matches =
3982 self.syntax
3983 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3984 grammar.text_object_config.as_ref().map(|c| &c.query)
3985 });
3986
3987 let configs = matches
3988 .grammars()
3989 .iter()
3990 .map(|grammar| grammar.text_object_config.as_ref())
3991 .collect::<Vec<_>>();
3992
3993 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3994
3995 iter::from_fn(move || {
3996 loop {
3997 while let Some(capture) = captures.pop() {
3998 if capture.0.overlaps(&range) {
3999 return Some(capture);
4000 }
4001 }
4002
4003 let mat = matches.peek()?;
4004
4005 let Some(config) = configs[mat.grammar_index].as_ref() else {
4006 matches.advance();
4007 continue;
4008 };
4009
4010 for capture in mat.captures {
4011 let Some(ix) = config
4012 .text_objects_by_capture_ix
4013 .binary_search_by_key(&capture.index, |e| e.0)
4014 .ok()
4015 else {
4016 continue;
4017 };
4018 let text_object = config.text_objects_by_capture_ix[ix].1;
4019 let byte_range = capture.node.byte_range();
4020
4021 let mut found = false;
4022 for (range, existing) in captures.iter_mut() {
4023 if existing == &text_object {
4024 range.start = range.start.min(byte_range.start);
4025 range.end = range.end.max(byte_range.end);
4026 found = true;
4027 break;
4028 }
4029 }
4030
4031 if !found {
4032 captures.push((byte_range, text_object));
4033 }
4034 }
4035
4036 matches.advance();
4037 }
4038 })
4039 }
4040
    /// Returns enclosing bracket ranges containing the given range.
4042 pub fn enclosing_bracket_ranges<T: ToOffset>(
4043 &self,
4044 range: Range<T>,
4045 ) -> impl Iterator<Item = BracketMatch> + '_ {
4046 let range = range.start.to_offset(self)..range.end.to_offset(self);
4047
4048 self.bracket_ranges(range.clone()).filter(move |pair| {
4049 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4050 })
4051 }
4052
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges
    /// are considered.
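    ///
    /// Sketch of a call with a filter (illustrative; assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// use std::ops::Range;
    ///
    /// // Only consider pairs whose open bracket is a single character, e.g. `(` or `{`.
    /// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
    ///     &|open, _close| open.len() == 1;
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(10_usize..20, Some(filter));
    /// ```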
4056 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4057 &self,
4058 range: Range<T>,
4059 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4060 ) -> Option<(Range<usize>, Range<usize>)> {
4061 let range = range.start.to_offset(self)..range.end.to_offset(self);
4062
4063 // Get the ranges of the innermost pair of brackets.
4064 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4065
4066 for pair in self.enclosing_bracket_ranges(range.clone()) {
4067 if let Some(range_filter) = range_filter {
4068 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
4069 continue;
4070 }
4071 }
4072
4073 let len = pair.close_range.end - pair.open_range.start;
4074
4075 if let Some((existing_open, existing_close)) = &result {
4076 let existing_len = existing_close.end - existing_open.start;
4077 if len > existing_len {
4078 continue;
4079 }
4080 }
4081
4082 result = Some((pair.open_range, pair.close_range));
4083 }
4084
4085 result
4086 }
4087
    /// Returns the byte ranges of any matches of the redaction query.
4089 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4090 /// will be run on the relevant section of the buffer.
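    ///
    /// Sketch (illustrative; assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Collect every byte range that a language's redaction query marks as sensitive,
    /// // e.g. values in `.env`-style files, so the UI can mask them.
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```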
4091 pub fn redacted_ranges<T: ToOffset>(
4092 &self,
4093 range: Range<T>,
4094 ) -> impl Iterator<Item = Range<usize>> + '_ {
4095 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4096 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4097 grammar
4098 .redactions_config
4099 .as_ref()
4100 .map(|config| &config.query)
4101 });
4102
4103 let configs = syntax_matches
4104 .grammars()
4105 .iter()
4106 .map(|grammar| grammar.redactions_config.as_ref())
4107 .collect::<Vec<_>>();
4108
4109 iter::from_fn(move || {
4110 let redacted_range = syntax_matches
4111 .peek()
4112 .and_then(|mat| {
4113 configs[mat.grammar_index].and_then(|config| {
4114 mat.captures
4115 .iter()
4116 .find(|capture| capture.index == config.redaction_capture_ix)
4117 })
4118 })
4119 .map(|mat| mat.node.byte_range());
4120 syntax_matches.advance();
4121 redacted_range
4122 })
4123 }
4124
4125 pub fn injections_intersecting_range<T: ToOffset>(
4126 &self,
4127 range: Range<T>,
4128 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4129 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4130
4131 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4132 grammar
4133 .injection_config
4134 .as_ref()
4135 .map(|config| &config.query)
4136 });
4137
4138 let configs = syntax_matches
4139 .grammars()
4140 .iter()
4141 .map(|grammar| grammar.injection_config.as_ref())
4142 .collect::<Vec<_>>();
4143
4144 iter::from_fn(move || {
4145 let ranges = syntax_matches.peek().and_then(|mat| {
4146 let config = &configs[mat.grammar_index]?;
4147 let content_capture_range = mat.captures.iter().find_map(|capture| {
4148 if capture.index == config.content_capture_ix {
4149 Some(capture.node.byte_range())
4150 } else {
4151 None
4152 }
4153 })?;
4154 let language = self.language_at(content_capture_range.start)?;
4155 Some((content_capture_range, language))
4156 });
4157 syntax_matches.advance();
4158 ranges
4159 })
4160 }
4161
4162 pub fn runnable_ranges(
4163 &self,
4164 offset_range: Range<usize>,
4165 ) -> impl Iterator<Item = RunnableRange> + '_ {
4166 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4167 grammar.runnable_config.as_ref().map(|config| &config.query)
4168 });
4169
4170 let test_configs = syntax_matches
4171 .grammars()
4172 .iter()
4173 .map(|grammar| grammar.runnable_config.as_ref())
4174 .collect::<Vec<_>>();
4175
4176 iter::from_fn(move || {
4177 loop {
4178 let mat = syntax_matches.peek()?;
4179
4180 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4181 let mut run_range = None;
4182 let full_range = mat.captures.iter().fold(
4183 Range {
4184 start: usize::MAX,
4185 end: 0,
4186 },
4187 |mut acc, next| {
4188 let byte_range = next.node.byte_range();
4189 if acc.start > byte_range.start {
4190 acc.start = byte_range.start;
4191 }
4192 if acc.end < byte_range.end {
4193 acc.end = byte_range.end;
4194 }
4195 acc
4196 },
4197 );
4198 if full_range.start > full_range.end {
4199 // We did not find a full spanning range of this match.
4200 return None;
4201 }
4202 let extra_captures: SmallVec<[_; 1]> =
4203 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4204 test_configs
4205 .extra_captures
4206 .get(capture.index as usize)
4207 .cloned()
4208 .and_then(|tag_name| match tag_name {
4209 RunnableCapture::Named(name) => {
4210 Some((capture.node.byte_range(), name))
4211 }
4212 RunnableCapture::Run => {
4213 let _ = run_range.insert(capture.node.byte_range());
4214 None
4215 }
4216 })
4217 }));
4218 let run_range = run_range?;
4219 let tags = test_configs
4220 .query
4221 .property_settings(mat.pattern_index)
4222 .iter()
4223 .filter_map(|property| {
4224 if *property.key == *"tag" {
4225 property
4226 .value
4227 .as_ref()
4228 .map(|value| RunnableTag(value.to_string().into()))
4229 } else {
4230 None
4231 }
4232 })
4233 .collect();
4234 let extra_captures = extra_captures
4235 .into_iter()
4236 .map(|(range, name)| {
4237 (
4238 name.to_string(),
4239 self.text_for_range(range.clone()).collect::<String>(),
4240 )
4241 })
4242 .collect();
4243 // All tags should have the same range.
4244 Some(RunnableRange {
4245 run_range,
4246 full_range,
4247 runnable: Runnable {
4248 tags,
4249 language: mat.language,
4250 buffer: self.remote_id(),
4251 },
4252 extra_captures,
4253 buffer_id: self.remote_id(),
4254 })
4255 });
4256
4257 syntax_matches.advance();
4258 if test_range.is_some() {
                    // Short-circuiting when `.peek()?` returns `None` is fine, but a match without
                    // a run marker must not end this iterator, so in that case we loop around and
                    // try the next match.
4261 return test_range;
4262 }
4263 }
4264 })
4265 }
4266
    /// Returns the selections of remote peers intersecting the given range,
    /// optionally including the local replica's selections when `include_local` is true.
4268 #[allow(clippy::type_complexity)]
4269 pub fn selections_in_range(
4270 &self,
4271 range: Range<Anchor>,
4272 include_local: bool,
4273 ) -> impl Iterator<
4274 Item = (
4275 ReplicaId,
4276 bool,
4277 CursorShape,
4278 impl Iterator<Item = &Selection<Anchor>> + '_,
4279 ),
4280 > + '_ {
4281 self.remote_selections
4282 .iter()
4283 .filter(move |(replica_id, set)| {
4284 (include_local || **replica_id != self.text.replica_id())
4285 && !set.selections.is_empty()
4286 })
4287 .map(move |(replica_id, set)| {
4288 let start_ix = match set.selections.binary_search_by(|probe| {
4289 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4290 }) {
4291 Ok(ix) | Err(ix) => ix,
4292 };
4293 let end_ix = match set.selections.binary_search_by(|probe| {
4294 probe.start.cmp(&range.end, self).then(Ordering::Less)
4295 }) {
4296 Ok(ix) | Err(ix) => ix,
4297 };
4298
4299 (
4300 *replica_id,
4301 set.line_mode,
4302 set.cursor_shape,
4303 set.selections[start_ix..end_ix].iter(),
4304 )
4305 })
4306 }
4307
    /// Returns whether the buffer contains any diagnostics.
4309 pub fn has_diagnostics(&self) -> bool {
4310 !self.diagnostics.is_empty()
4311 }
4312
4313 /// Returns all the diagnostics intersecting the given range.
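    ///
    /// Sketch (illustrative; assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Walk all diagnostics in the buffer, resolving their anchors to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```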
4314 pub fn diagnostics_in_range<'a, T, O>(
4315 &'a self,
4316 search_range: Range<T>,
4317 reversed: bool,
4318 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4319 where
4320 T: 'a + Clone + ToOffset,
4321 O: 'a + FromAnchor,
4322 {
4323 let mut iterators: Vec<_> = self
4324 .diagnostics
4325 .iter()
4326 .map(|(_, collection)| {
4327 collection
4328 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4329 .peekable()
4330 })
4331 .collect();
4332
4333 std::iter::from_fn(move || {
4334 let (next_ix, _) = iterators
4335 .iter_mut()
4336 .enumerate()
4337 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4338 .min_by(|(_, a), (_, b)| {
4339 let cmp = a
4340 .range
4341 .start
4342 .cmp(&b.range.start, self)
4343 // when range is equal, sort by diagnostic severity
4344 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4345 // and stabilize order with group_id
4346 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4347 if reversed { cmp.reverse() } else { cmp }
4348 })?;
4349 iterators[next_ix]
4350 .next()
4351 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4352 diagnostic,
4353 range: FromAnchor::from_anchor(&range.start, self)
4354 ..FromAnchor::from_anchor(&range.end, self),
4355 })
4356 })
4357 }
4358
4359 /// Returns all the diagnostic groups associated with the given
4360 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
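    ///
    /// Sketch (illustrative; assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Fetch every diagnostic group from all language servers and inspect the
    /// // primary entry of each group.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```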
4362 pub fn diagnostic_groups(
4363 &self,
4364 language_server_id: Option<LanguageServerId>,
4365 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4366 let mut groups = Vec::new();
4367
4368 if let Some(language_server_id) = language_server_id {
4369 if let Ok(ix) = self
4370 .diagnostics
4371 .binary_search_by_key(&language_server_id, |e| e.0)
4372 {
4373 self.diagnostics[ix]
4374 .1
4375 .groups(language_server_id, &mut groups, self);
4376 }
4377 } else {
4378 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4379 diagnostics.groups(*language_server_id, &mut groups, self);
4380 }
4381 }
4382
4383 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4384 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4385 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4386 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4387 });
4388
4389 groups
4390 }
4391
4392 /// Returns an iterator over the diagnostics for the given group.
4393 pub fn diagnostic_group<O>(
4394 &self,
4395 group_id: usize,
4396 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4397 where
4398 O: FromAnchor + 'static,
4399 {
4400 self.diagnostics
4401 .iter()
4402 .flat_map(move |(_, set)| set.group(group_id, self))
4403 }
4404
4405 /// An integer version number that accounts for all updates besides
4406 /// the buffer's text itself (which is versioned via a version vector).
4407 pub fn non_text_state_update_count(&self) -> usize {
4408 self.non_text_state_update_count
4409 }
4410
4411 /// An integer version that changes when the buffer's syntax changes.
4412 pub fn syntax_update_count(&self) -> usize {
4413 self.syntax.update_count()
4414 }
4415
    /// Returns the buffer's underlying file, if any.
4417 pub fn file(&self) -> Option<&Arc<dyn File>> {
4418 self.file.as_ref()
4419 }
4420
    /// Resolves the path of the underlying file relative to its worktree root,
    /// or including the root name when `include_root` is true.
4422 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4423 if let Some(file) = self.file() {
4424 if file.path().file_name().is_none() || include_root {
4425 Some(file.full_path(cx))
4426 } else {
4427 Some(file.path().to_path_buf())
4428 }
4429 } else {
4430 None
4431 }
4432 }
4433
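    /// Collects words from `query.range`, returning a map from each word's text to
    /// an anchor range covering one of its occurrences. When `fuzzy_contents` is set,
    /// only words containing its characters, in order and case-insensitively, are kept.
    ///
    /// Sketch (illustrative; assumes `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Collect words matching the fuzzy query "cfg", skipping words that start
    /// // with a digit.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```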
4434 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4435 let query_str = query.fuzzy_contents;
4436 if query_str.map_or(false, |query| query.is_empty()) {
4437 return BTreeMap::default();
4438 }
4439
4440 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4441 language,
4442 override_id: None,
4443 }));
4444
4445 let mut query_ix = 0;
4446 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4447 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4448
4449 let mut words = BTreeMap::default();
4450 let mut current_word_start_ix = None;
4451 let mut chunk_ix = query.range.start;
4452 for chunk in self.chunks(query.range, false) {
4453 for (i, c) in chunk.text.char_indices() {
4454 let ix = chunk_ix + i;
4455 if classifier.is_word(c) {
4456 if current_word_start_ix.is_none() {
4457 current_word_start_ix = Some(ix);
4458 }
4459
4460 if let Some(query_chars) = &query_chars {
4461 if query_ix < query_len {
4462 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4463 query_ix += 1;
4464 }
4465 }
4466 }
4467 continue;
4468 } else if let Some(word_start) = current_word_start_ix.take() {
4469 if query_ix == query_len {
4470 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4471 let mut word_text = self.text_for_range(word_start..ix).peekable();
4472 let first_char = word_text
4473 .peek()
4474 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip
                        // "words" that start with a digit.
4476 if !query.skip_digits
4477 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4478 {
4479 words.insert(word_text.collect(), word_range);
4480 }
4481 }
4482 }
4483 query_ix = 0;
4484 }
4485 chunk_ix += chunk.text.len();
4486 }
4487
4488 words
4489 }
4490}
4491
4492pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the fuzzy string's characters, in order.
4494 pub fuzzy_contents: Option<&'a str>,
4495 /// Skips words that start with a digit.
4496 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4498 pub range: Range<usize>,
4499}
4500
4501fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4502 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4503}
4504
4505fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4506 let mut result = IndentSize::spaces(0);
4507 for c in text {
4508 let kind = match c {
4509 ' ' => IndentKind::Space,
4510 '\t' => IndentKind::Tab,
4511 _ => break,
4512 };
4513 if result.len == 0 {
4514 result.kind = kind;
4515 }
4516 result.len += 1;
4517 }
4518 result
4519}
4520
4521impl Clone for BufferSnapshot {
4522 fn clone(&self) -> Self {
4523 Self {
4524 text: self.text.clone(),
4525 syntax: self.syntax.clone(),
4526 file: self.file.clone(),
4527 remote_selections: self.remote_selections.clone(),
4528 diagnostics: self.diagnostics.clone(),
4529 language: self.language.clone(),
4530 non_text_state_update_count: self.non_text_state_update_count,
4531 }
4532 }
4533}
4534
4535impl Deref for BufferSnapshot {
4536 type Target = text::BufferSnapshot;
4537
4538 fn deref(&self) -> &Self::Target {
4539 &self.text
4540 }
4541}
4542
4543unsafe impl Send for BufferChunks<'_> {}
4544
4545impl<'a> BufferChunks<'a> {
4546 pub(crate) fn new(
4547 text: &'a Rope,
4548 range: Range<usize>,
4549 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4550 diagnostics: bool,
4551 buffer_snapshot: Option<&'a BufferSnapshot>,
4552 ) -> Self {
4553 let mut highlights = None;
4554 if let Some((captures, highlight_maps)) = syntax {
4555 highlights = Some(BufferChunkHighlights {
4556 captures,
4557 next_capture: None,
4558 stack: Default::default(),
4559 highlight_maps,
4560 })
4561 }
4562
4563 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4564 let chunks = text.chunks_in_range(range.clone());
4565
4566 let mut this = BufferChunks {
4567 range,
4568 buffer_snapshot,
4569 chunks,
4570 diagnostic_endpoints,
4571 error_depth: 0,
4572 warning_depth: 0,
4573 information_depth: 0,
4574 hint_depth: 0,
4575 unnecessary_depth: 0,
4576 underline: true,
4577 highlights,
4578 };
4579 this.initialize_diagnostic_endpoints();
4580 this
4581 }
4582
    /// Seeks to the given byte range in the buffer.
4584 pub fn seek(&mut self, range: Range<usize>) {
4585 let old_range = std::mem::replace(&mut self.range, range.clone());
4586 self.chunks.set_range(self.range.clone());
4587 if let Some(highlights) = self.highlights.as_mut() {
4588 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4589 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4590 highlights
4591 .stack
4592 .retain(|(end_offset, _)| *end_offset > range.start);
4593 if let Some(capture) = &highlights.next_capture {
4594 if range.start >= capture.node.start_byte() {
4595 let next_capture_end = capture.node.end_byte();
4596 if range.start < next_capture_end {
4597 highlights.stack.push((
4598 next_capture_end,
4599 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4600 ));
4601 }
4602 highlights.next_capture.take();
4603 }
4604 }
4605 } else if let Some(snapshot) = self.buffer_snapshot {
4606 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4607 *highlights = BufferChunkHighlights {
4608 captures,
4609 next_capture: None,
4610 stack: Default::default(),
4611 highlight_maps,
4612 };
4613 } else {
4614 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4615 // Seeking such BufferChunks is not supported.
4616 debug_assert!(
4617 false,
4618 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4619 );
4620 }
4621
4622 highlights.captures.set_byte_range(self.range.clone());
4623 self.initialize_diagnostic_endpoints();
4624 }
4625 }
4626
4627 fn initialize_diagnostic_endpoints(&mut self) {
4628 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4629 if let Some(buffer) = self.buffer_snapshot {
4630 let mut diagnostic_endpoints = Vec::new();
4631 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4632 diagnostic_endpoints.push(DiagnosticEndpoint {
4633 offset: entry.range.start,
4634 is_start: true,
4635 severity: entry.diagnostic.severity,
4636 is_unnecessary: entry.diagnostic.is_unnecessary,
4637 underline: entry.diagnostic.underline,
4638 });
4639 diagnostic_endpoints.push(DiagnosticEndpoint {
4640 offset: entry.range.end,
4641 is_start: false,
4642 severity: entry.diagnostic.severity,
4643 is_unnecessary: entry.diagnostic.is_unnecessary,
4644 underline: entry.diagnostic.underline,
4645 });
4646 }
4647 diagnostic_endpoints
4648 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4649 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4650 self.hint_depth = 0;
4651 self.error_depth = 0;
4652 self.warning_depth = 0;
4653 self.information_depth = 0;
4654 }
4655 }
4656 }
4657
4658 /// The current byte offset in the buffer.
4659 pub fn offset(&self) -> usize {
4660 self.range.start
4661 }
4662
4663 pub fn range(&self) -> Range<usize> {
4664 self.range.clone()
4665 }
4666
4667 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4668 let depth = match endpoint.severity {
4669 DiagnosticSeverity::ERROR => &mut self.error_depth,
4670 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4671 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4672 DiagnosticSeverity::HINT => &mut self.hint_depth,
4673 _ => return,
4674 };
4675 if endpoint.is_start {
4676 *depth += 1;
4677 } else {
4678 *depth -= 1;
4679 }
4680
4681 if endpoint.is_unnecessary {
4682 if endpoint.is_start {
4683 self.unnecessary_depth += 1;
4684 } else {
4685 self.unnecessary_depth -= 1;
4686 }
4687 }
4688 }
4689
4690 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4691 if self.error_depth > 0 {
4692 Some(DiagnosticSeverity::ERROR)
4693 } else if self.warning_depth > 0 {
4694 Some(DiagnosticSeverity::WARNING)
4695 } else if self.information_depth > 0 {
4696 Some(DiagnosticSeverity::INFORMATION)
4697 } else if self.hint_depth > 0 {
4698 Some(DiagnosticSeverity::HINT)
4699 } else {
4700 None
4701 }
4702 }
4703
4704 fn current_code_is_unnecessary(&self) -> bool {
4705 self.unnecessary_depth > 0
4706 }
4707}
4708
4709impl<'a> Iterator for BufferChunks<'a> {
4710 type Item = Chunk<'a>;
4711
4712 fn next(&mut self) -> Option<Self::Item> {
4713 let mut next_capture_start = usize::MAX;
4714 let mut next_diagnostic_endpoint = usize::MAX;
4715
4716 if let Some(highlights) = self.highlights.as_mut() {
4717 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4718 if *parent_capture_end <= self.range.start {
4719 highlights.stack.pop();
4720 } else {
4721 break;
4722 }
4723 }
4724
4725 if highlights.next_capture.is_none() {
4726 highlights.next_capture = highlights.captures.next();
4727 }
4728
4729 while let Some(capture) = highlights.next_capture.as_ref() {
4730 if self.range.start < capture.node.start_byte() {
4731 next_capture_start = capture.node.start_byte();
4732 break;
4733 } else {
4734 let highlight_id =
4735 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4736 highlights
4737 .stack
4738 .push((capture.node.end_byte(), highlight_id));
4739 highlights.next_capture = highlights.captures.next();
4740 }
4741 }
4742 }
4743
4744 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4745 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4746 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4747 if endpoint.offset <= self.range.start {
4748 self.update_diagnostic_depths(endpoint);
4749 diagnostic_endpoints.next();
4750 self.underline = endpoint.underline;
4751 } else {
4752 next_diagnostic_endpoint = endpoint.offset;
4753 break;
4754 }
4755 }
4756 }
4757 self.diagnostic_endpoints = diagnostic_endpoints;
4758
4759 if let Some(chunk) = self.chunks.peek() {
4760 let chunk_start = self.range.start;
4761 let mut chunk_end = (self.chunks.offset() + chunk.len())
4762 .min(next_capture_start)
4763 .min(next_diagnostic_endpoint);
4764 let mut highlight_id = None;
4765 if let Some(highlights) = self.highlights.as_ref() {
4766 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4767 chunk_end = chunk_end.min(*parent_capture_end);
4768 highlight_id = Some(*parent_highlight_id);
4769 }
4770 }
4771
4772 let slice =
4773 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4774 self.range.start = chunk_end;
4775 if self.range.start == self.chunks.offset() + chunk.len() {
4776 self.chunks.next().unwrap();
4777 }
4778
4779 Some(Chunk {
4780 text: slice,
4781 syntax_highlight_id: highlight_id,
4782 underline: self.underline,
4783 diagnostic_severity: self.current_diagnostic_severity(),
4784 is_unnecessary: self.current_code_is_unnecessary(),
4785 ..Chunk::default()
4786 })
4787 } else {
4788 None
4789 }
4790 }
4791}
4792
4793impl operation_queue::Operation for Operation {
4794 fn lamport_timestamp(&self) -> clock::Lamport {
4795 match self {
4796 Operation::Buffer(_) => {
4797 unreachable!("buffer operations should never be deferred at this layer")
4798 }
4799 Operation::UpdateDiagnostics {
4800 lamport_timestamp, ..
4801 }
4802 | Operation::UpdateSelections {
4803 lamport_timestamp, ..
4804 }
4805 | Operation::UpdateCompletionTriggers {
4806 lamport_timestamp, ..
4807 } => *lamport_timestamp,
4808 }
4809 }
4810}
4811
4812impl Default for Diagnostic {
4813 fn default() -> Self {
4814 Self {
4815 source: Default::default(),
4816 source_kind: DiagnosticSourceKind::Other,
4817 code: None,
4818 code_description: None,
4819 severity: DiagnosticSeverity::ERROR,
4820 message: Default::default(),
4821 markdown: None,
4822 group_id: 0,
4823 is_primary: false,
4824 is_disk_based: false,
4825 is_unnecessary: false,
4826 underline: true,
4827 data: None,
4828 }
4829 }
4830}
4831
4832impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4834 pub fn spaces(len: u32) -> Self {
4835 Self {
4836 len,
4837 kind: IndentKind::Space,
4838 }
4839 }
4840
4841 /// Returns an [`IndentSize`] representing a tab.
4842 pub fn tab() -> Self {
4843 Self {
4844 len: 1,
4845 kind: IndentKind::Tab,
4846 }
4847 }
4848
4849 /// An iterator over the characters represented by this [`IndentSize`].
4850 pub fn chars(&self) -> impl Iterator<Item = char> {
4851 iter::repeat(self.char()).take(self.len as usize)
4852 }
4853
4854 /// The character representation of this [`IndentSize`].
4855 pub fn char(&self) -> char {
4856 match self.kind {
4857 IndentKind::Space => ' ',
4858 IndentKind::Tab => '\t',
4859 }
4860 }
4861
4862 /// Consumes the current [`IndentSize`] and returns a new one that has
4863 /// been shrunk or enlarged by the given size along the given direction.
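    ///
    /// Sketch of the expected behaviour (illustrative; field access assumed to be
    /// visible as it is within this crate):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing by a same-kind indent extends the run of characters.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(grown.len, 6);
    /// // Shrinking by a smaller same-kind indent subtracts from it.
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(shrunk.len, 2);
    /// // Mismatched kinds leave the size unchanged.
    /// let unchanged = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::tab());
    /// assert_eq!(unchanged.len, 4);
    /// ```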
4864 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4865 match direction {
4866 Ordering::Less => {
4867 if self.kind == size.kind && self.len >= size.len {
4868 self.len -= size.len;
4869 }
4870 }
4871 Ordering::Equal => {}
4872 Ordering::Greater => {
4873 if self.len == 0 {
4874 self = size;
4875 } else if self.kind == size.kind {
4876 self.len += size.len;
4877 }
4878 }
4879 }
4880 self
4881 }
4882
4883 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4884 match self.kind {
4885 IndentKind::Space => self.len as usize,
4886 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4887 }
4888 }
4889}
4890
4891#[cfg(any(test, feature = "test-support"))]
4892pub struct TestFile {
4893 pub path: Arc<Path>,
4894 pub root_name: String,
4895 pub local_root: Option<PathBuf>,
4896}
4897
4898#[cfg(any(test, feature = "test-support"))]
4899impl File for TestFile {
4900 fn path(&self) -> &Arc<Path> {
4901 &self.path
4902 }
4903
4904 fn full_path(&self, _: &gpui::App) -> PathBuf {
4905 PathBuf::from(&self.root_name).join(self.path.as_ref())
4906 }
4907
4908 fn as_local(&self) -> Option<&dyn LocalFile> {
4909 if self.local_root.is_some() {
4910 Some(self)
4911 } else {
4912 None
4913 }
4914 }
4915
4916 fn disk_state(&self) -> DiskState {
4917 unimplemented!()
4918 }
4919
4920 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4921 self.path().file_name().unwrap_or(self.root_name.as_ref())
4922 }
4923
4924 fn worktree_id(&self, _: &App) -> WorktreeId {
4925 WorktreeId::from_usize(0)
4926 }
4927
4928 fn to_proto(&self, _: &App) -> rpc::proto::File {
4929 unimplemented!()
4930 }
4931
4932 fn is_private(&self) -> bool {
4933 false
4934 }
4935}
4936
4937#[cfg(any(test, feature = "test-support"))]
4938impl LocalFile for TestFile {
4939 fn abs_path(&self, _cx: &App) -> PathBuf {
4940 PathBuf::from(self.local_root.as_ref().unwrap())
4941 .join(&self.root_name)
4942 .join(self.path.as_ref())
4943 }
4944
4945 fn load(&self, _cx: &App) -> Task<Result<String>> {
4946 unimplemented!()
4947 }
4948
4949 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4950 unimplemented!()
4951 }
4952}
4953
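/// Coalesces an ascending sequence of row numbers into contiguous ranges, capping
/// each range at `max_len` rows.
///
/// Sketch of the expected behaviour (illustrative only):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```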
4954pub(crate) fn contiguous_ranges(
4955 values: impl Iterator<Item = u32>,
4956 max_len: usize,
4957) -> impl Iterator<Item = Range<u32>> {
4958 let mut values = values;
4959 let mut current_range: Option<Range<u32>> = None;
4960 std::iter::from_fn(move || {
4961 loop {
4962 if let Some(value) = values.next() {
4963 if let Some(range) = &mut current_range {
4964 if value == range.end && range.len() < max_len {
4965 range.end += 1;
4966 continue;
4967 }
4968 }
4969
4970 let prev_range = current_range.clone();
4971 current_range = Some(value..(value + 1));
4972 if prev_range.is_some() {
4973 return prev_range;
4974 }
4975 } else {
4976 return current_range.take();
4977 }
4978 }
4979 })
4980}
4981
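/// Classifies characters as word, whitespace, or punctuation, taking any
/// language-scope-specific word characters into account.
///
/// Sketch (illustrative only):
///
/// ```ignore
/// // Without a language scope, classification falls back to Unicode rules plus `_`.
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
/// ```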
4982#[derive(Default, Debug)]
4983pub struct CharClassifier {
4984 scope: Option<LanguageScope>,
4985 for_completion: bool,
4986 ignore_punctuation: bool,
4987}
4988
4989impl CharClassifier {
4990 pub fn new(scope: Option<LanguageScope>) -> Self {
4991 Self {
4992 scope,
4993 for_completion: false,
4994 ignore_punctuation: false,
4995 }
4996 }
4997
4998 pub fn for_completion(self, for_completion: bool) -> Self {
4999 Self {
5000 for_completion,
5001 ..self
5002 }
5003 }
5004
5005 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5006 Self {
5007 ignore_punctuation,
5008 ..self
5009 }
5010 }
5011
5012 pub fn is_whitespace(&self, c: char) -> bool {
5013 self.kind(c) == CharKind::Whitespace
5014 }
5015
5016 pub fn is_word(&self, c: char) -> bool {
5017 self.kind(c) == CharKind::Word
5018 }
5019
5020 pub fn is_punctuation(&self, c: char) -> bool {
5021 self.kind(c) == CharKind::Punctuation
5022 }
5023
5024 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5025 if c.is_alphanumeric() || c == '_' {
5026 return CharKind::Word;
5027 }
5028
5029 if let Some(scope) = &self.scope {
5030 let characters = if self.for_completion {
5031 scope.completion_query_characters()
5032 } else {
5033 scope.word_characters()
5034 };
5035 if let Some(characters) = characters {
5036 if characters.contains(&c) {
5037 return CharKind::Word;
5038 }
5039 }
5040 }
5041
5042 if c.is_whitespace() {
5043 return CharKind::Whitespace;
5044 }
5045
5046 if ignore_punctuation {
5047 CharKind::Word
5048 } else {
5049 CharKind::Punctuation
5050 }
5051 }
5052
5053 pub fn kind(&self, c: char) -> CharKind {
5054 self.kind_with(c, self.ignore_punctuation)
5055 }
5056}
5057
5058/// Find all of the ranges of whitespace that occur at the ends of lines
5059/// in the given rope.
5060///
5061/// This could also be done with a regex search, but this implementation
5062/// avoids copying text.
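///
/// Sketch of the expected result (illustrative; assumes `Rope` implements `From<&str>`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 29..30]);
/// ```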
5063pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5064 let mut ranges = Vec::new();
5065
5066 let mut offset = 0;
5067 let mut prev_chunk_trailing_whitespace_range = 0..0;
5068 for chunk in rope.chunks() {
5069 let mut prev_line_trailing_whitespace_range = 0..0;
5070 for (i, line) in chunk.split('\n').enumerate() {
5071 let line_end_offset = offset + line.len();
5072 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5073 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5074
5075 if i == 0 && trimmed_line_len == 0 {
5076 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5077 }
5078 if !prev_line_trailing_whitespace_range.is_empty() {
5079 ranges.push(prev_line_trailing_whitespace_range);
5080 }
5081
5082 offset = line_end_offset + 1;
5083 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5084 }
5085
5086 offset -= 1;
5087 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5088 }
5089
5090 if !prev_chunk_trailing_whitespace_range.is_empty() {
5091 ranges.push(prev_chunk_trailing_whitespace_range);
5092 }
5093
5094 ranges
5095}