1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoize calls to has_changes_since(saved_version).
126 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// The human-readable message (in markdown format)
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
232 /// Quick separation of diagnostics groups based by their source.
233 pub source_kind: DiagnosticSourceKind,
234 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
306 /// The buffer is in need of a reload
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
356 /// Return whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
395 /// Returns the absolute path of this file
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
422 /// If the original indent column is `a`, and the first line of insertion
423 /// is then auto-indented to column `b`, then every other line of
424 /// the insertion will be auto-indented to column `b - a`
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
497 /// Whether this chunk of text was originally a tab character.
498 pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
531/// A runnable is a set of data about a region that could be resolved into a task
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
574 /// Returns the first line without leading whitespace unless highlighted
575 /// and a boolean indicating if there are more lines after
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
727 ¤t_snapshot.text,
728 ¤t_snapshot.syntax,
729 Some(deletion_highlight_style),
730 syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 wait_for_autoindent_txs: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 change_bits: Default::default(),
960 _subscriptions: Vec::new(),
961 }
962 }
963
964 pub fn build_snapshot(
965 text: Rope,
966 language: Option<Arc<Language>>,
967 language_registry: Option<Arc<LanguageRegistry>>,
968 cx: &mut App,
969 ) -> impl Future<Output = BufferSnapshot> + use<> {
970 let entity_id = cx.reserve_entity::<Self>().entity_id();
971 let buffer_id = entity_id.as_non_zero_u64().into();
972 async move {
973 let text =
974 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
975 let mut syntax = SyntaxMap::new(&text).snapshot();
976 if let Some(language) = language.clone() {
977 let text = text.clone();
978 let language = language.clone();
979 let language_registry = language_registry.clone();
980 syntax.reparse(&text, language_registry, language);
981 }
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language,
989 non_text_state_update_count: 0,
990 }
991 }
992 }
993
994 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
995 let entity_id = cx.reserve_entity::<Self>().entity_id();
996 let buffer_id = entity_id.as_non_zero_u64().into();
997 let text =
998 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
999 let syntax = SyntaxMap::new(&text).snapshot();
1000 BufferSnapshot {
1001 text,
1002 syntax,
1003 file: None,
1004 diagnostics: Default::default(),
1005 remote_selections: Default::default(),
1006 language: None,
1007 non_text_state_update_count: 0,
1008 }
1009 }
1010
1011 #[cfg(any(test, feature = "test-support"))]
1012 pub fn build_snapshot_sync(
1013 text: Rope,
1014 language: Option<Arc<Language>>,
1015 language_registry: Option<Arc<LanguageRegistry>>,
1016 cx: &mut App,
1017 ) -> BufferSnapshot {
1018 let entity_id = cx.reserve_entity::<Self>().entity_id();
1019 let buffer_id = entity_id.as_non_zero_u64().into();
1020 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1021 let mut syntax = SyntaxMap::new(&text).snapshot();
1022 if let Some(language) = language.clone() {
1023 let text = text.clone();
1024 let language = language.clone();
1025 let language_registry = language_registry.clone();
1026 syntax.reparse(&text, language_registry, language);
1027 }
1028 BufferSnapshot {
1029 text,
1030 syntax,
1031 file: None,
1032 diagnostics: Default::default(),
1033 remote_selections: Default::default(),
1034 language,
1035 non_text_state_update_count: 0,
1036 }
1037 }
1038
1039 /// Retrieve a snapshot of the buffer's current state. This is computationally
1040 /// cheap, and allows reading from the buffer on a background thread.
1041 pub fn snapshot(&self) -> BufferSnapshot {
1042 let text = self.text.snapshot();
1043 let mut syntax_map = self.syntax_map.lock();
1044 syntax_map.interpolate(&text);
1045 let syntax = syntax_map.snapshot();
1046
1047 BufferSnapshot {
1048 text,
1049 syntax,
1050 file: self.file.clone(),
1051 remote_selections: self.remote_selections.clone(),
1052 diagnostics: self.diagnostics.clone(),
1053 language: self.language.clone(),
1054 non_text_state_update_count: self.non_text_state_update_count,
1055 }
1056 }
1057
1058 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1059 let this = cx.entity();
1060 cx.new(|cx| {
1061 let mut branch = Self {
1062 branch_state: Some(BufferBranchState {
1063 base_buffer: this.clone(),
1064 merged_operations: Default::default(),
1065 }),
1066 language: self.language.clone(),
1067 has_conflict: self.has_conflict,
1068 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1069 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1070 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1071 };
1072 if let Some(language_registry) = self.language_registry() {
1073 branch.set_language_registry(language_registry);
1074 }
1075
1076 // Reparse the branch buffer so that we get syntax highlighting immediately.
1077 branch.reparse(cx);
1078
1079 branch
1080 })
1081 }
1082
1083 pub fn preview_edits(
1084 &self,
1085 edits: Arc<[(Range<Anchor>, String)]>,
1086 cx: &App,
1087 ) -> Task<EditPreview> {
1088 let registry = self.language_registry();
1089 let language = self.language().cloned();
1090 let old_snapshot = self.text.snapshot();
1091 let mut branch_buffer = self.text.branch();
1092 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1093 cx.background_spawn(async move {
1094 if !edits.is_empty() {
1095 if let Some(language) = language.clone() {
1096 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1097 }
1098
1099 branch_buffer.edit(edits.iter().cloned());
1100 let snapshot = branch_buffer.snapshot();
1101 syntax_snapshot.interpolate(&snapshot);
1102
1103 if let Some(language) = language {
1104 syntax_snapshot.reparse(&snapshot, registry, language);
1105 }
1106 }
1107 EditPreview {
1108 old_snapshot,
1109 applied_edits_snapshot: branch_buffer.snapshot(),
1110 syntax_snapshot,
1111 }
1112 })
1113 }
1114
1115 /// Applies all of the changes in this buffer that intersect any of the
1116 /// given `ranges` to its base buffer.
1117 ///
1118 /// If `ranges` is empty, then all changes will be applied. This buffer must
1119 /// be a branch buffer to call this method.
1120 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1121 let Some(base_buffer) = self.base_buffer() else {
1122 debug_panic!("not a branch buffer");
1123 return;
1124 };
1125
1126 let mut ranges = if ranges.is_empty() {
1127 &[0..usize::MAX]
1128 } else {
1129 ranges.as_slice()
1130 }
1131 .into_iter()
1132 .peekable();
1133
1134 let mut edits = Vec::new();
1135 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1136 let mut is_included = false;
1137 while let Some(range) = ranges.peek() {
1138 if range.end < edit.new.start {
1139 ranges.next().unwrap();
1140 } else {
1141 if range.start <= edit.new.end {
1142 is_included = true;
1143 }
1144 break;
1145 }
1146 }
1147
1148 if is_included {
1149 edits.push((
1150 edit.old.clone(),
1151 self.text_for_range(edit.new.clone()).collect::<String>(),
1152 ));
1153 }
1154 }
1155
1156 let operation = base_buffer.update(cx, |base_buffer, cx| {
1157 // cx.emit(BufferEvent::DiffBaseChanged);
1158 base_buffer.edit(edits, None, cx)
1159 });
1160
1161 if let Some(operation) = operation
1162 && let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 {
1166 merged_operations.push(operation);
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1188 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1189 merged_operations.remove(ix);
1190 operation_to_undo = Some(operation.timestamp);
1191 }
1192
1193 self.apply_ops([operation.clone()], cx);
1194
1195 if let Some(timestamp) = operation_to_undo {
1196 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1197 self.undo_operations(counts, cx);
1198 }
1199 }
1200
1201 #[cfg(test)]
1202 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1203 &self.text
1204 }
1205
1206 /// Retrieve a snapshot of the buffer's raw text, without any
1207 /// language-related state like the syntax tree or diagnostics.
1208 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1209 self.text.snapshot()
1210 }
1211
1212 /// The file associated with the buffer, if any.
1213 pub fn file(&self) -> Option<&Arc<dyn File>> {
1214 self.file.as_ref()
1215 }
1216
1217 /// The version of the buffer that was last saved or reloaded from disk.
1218 pub fn saved_version(&self) -> &clock::Global {
1219 &self.saved_version
1220 }
1221
1222 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1223 pub fn saved_mtime(&self) -> Option<MTime> {
1224 self.saved_mtime
1225 }
1226
1227 /// Assign a language to the buffer.
1228 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1229 self.non_text_state_update_count += 1;
1230 self.syntax_map.lock().clear(&self.text);
1231 self.language = language;
1232 self.was_changed();
1233 self.reparse(cx);
1234 cx.emit(BufferEvent::LanguageChanged);
1235 }
1236
1237 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1238 /// other languages if parts of the buffer are written in different languages.
1239 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1240 self.syntax_map
1241 .lock()
1242 .set_language_registry(language_registry);
1243 }
1244
1245 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1246 self.syntax_map.lock().language_registry()
1247 }
1248
1249 /// Assign the buffer a new [`Capability`].
1250 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1251 self.capability = capability;
1252 cx.emit(BufferEvent::CapabilityChanged)
1253 }
1254
1255 /// This method is called to signal that the buffer has been saved.
1256 pub fn did_save(
1257 &mut self,
1258 version: clock::Global,
1259 mtime: Option<MTime>,
1260 cx: &mut Context<Self>,
1261 ) {
1262 self.saved_version = version;
1263 self.has_unsaved_edits
1264 .set((self.saved_version().clone(), false));
1265 self.has_conflict = false;
1266 self.saved_mtime = mtime;
1267 self.was_changed();
1268 cx.emit(BufferEvent::Saved);
1269 cx.notify();
1270 }
1271
1272 /// This method is called to signal that the buffer has been discarded.
1273 pub fn discarded(&self, cx: &mut Context<Self>) {
1274 cx.emit(BufferEvent::Discarded);
1275 cx.notify();
1276 }
1277
1278 /// Reloads the contents of the buffer from disk.
1279 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1280 let (tx, rx) = futures::channel::oneshot::channel();
1281 let prev_version = self.text.version();
1282 self.reload_task = Some(cx.spawn(async move |this, cx| {
1283 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1284 let file = this.file.as_ref()?.as_local()?;
1285
1286 Some((file.disk_state().mtime(), file.load(cx)))
1287 })?
1288 else {
1289 return Ok(());
1290 };
1291
1292 let new_text = new_text.await?;
1293 let diff = this
1294 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1295 .await;
1296 this.update(cx, |this, cx| {
1297 if this.version() == diff.base_version {
1298 this.finalize_last_transaction();
1299 this.apply_diff(diff, cx);
1300 tx.send(this.finalize_last_transaction().cloned()).ok();
1301 this.has_conflict = false;
1302 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1303 } else {
1304 if !diff.edits.is_empty()
1305 || this
1306 .edits_since::<usize>(&diff.base_version)
1307 .next()
1308 .is_some()
1309 {
1310 this.has_conflict = true;
1311 }
1312
1313 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1314 }
1315
1316 this.reload_task.take();
1317 })
1318 }));
1319 rx
1320 }
1321
1322 /// This method is called to signal that the buffer has been reloaded.
1323 pub fn did_reload(
1324 &mut self,
1325 version: clock::Global,
1326 line_ending: LineEnding,
1327 mtime: Option<MTime>,
1328 cx: &mut Context<Self>,
1329 ) {
1330 self.saved_version = version;
1331 self.has_unsaved_edits
1332 .set((self.saved_version.clone(), false));
1333 self.text.set_line_ending(line_ending);
1334 self.saved_mtime = mtime;
1335 cx.emit(BufferEvent::Reloaded);
1336 cx.notify();
1337 }
1338
1339 /// Updates the [`File`] backing this buffer. This should be called when
1340 /// the file has changed or has been deleted.
1341 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1342 let was_dirty = self.is_dirty();
1343 let mut file_changed = false;
1344
1345 if let Some(old_file) = self.file.as_ref() {
1346 if new_file.path() != old_file.path() {
1347 file_changed = true;
1348 }
1349
1350 let old_state = old_file.disk_state();
1351 let new_state = new_file.disk_state();
1352 if old_state != new_state {
1353 file_changed = true;
1354 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1355 cx.emit(BufferEvent::ReloadNeeded)
1356 }
1357 }
1358 } else {
1359 file_changed = true;
1360 };
1361
1362 self.file = Some(new_file);
1363 if file_changed {
1364 self.was_changed();
1365 self.non_text_state_update_count += 1;
1366 if was_dirty != self.is_dirty() {
1367 cx.emit(BufferEvent::DirtyChanged);
1368 }
1369 cx.emit(BufferEvent::FileHandleChanged);
1370 cx.notify();
1371 }
1372 }
1373
1374 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1375 Some(self.branch_state.as_ref()?.base_buffer.clone())
1376 }
1377
1378 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1379 pub fn language(&self) -> Option<&Arc<Language>> {
1380 self.language.as_ref()
1381 }
1382
1383 /// Returns the [`Language`] at the given location.
1384 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1385 let offset = position.to_offset(self);
1386 let mut is_first = true;
1387 let start_anchor = self.anchor_before(offset);
1388 let end_anchor = self.anchor_after(offset);
1389 self.syntax_map
1390 .lock()
1391 .layers_for_range(offset..offset, &self.text, false)
1392 .filter(|layer| {
1393 if is_first {
1394 is_first = false;
1395 return true;
1396 }
1397 let any_sub_ranges_contain_range = layer
1398 .included_sub_ranges
1399 .map(|sub_ranges| {
1400 sub_ranges.iter().any(|sub_range| {
1401 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1402 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1403 !is_before_start && !is_after_end
1404 })
1405 })
1406 .unwrap_or(true);
1407 let result = any_sub_ranges_contain_range;
1408 return result;
1409 })
1410 .last()
1411 .map(|info| info.language.clone())
1412 .or_else(|| self.language.clone())
1413 }
1414
1415 /// Returns each [`Language`] for the active syntax layers at the given location.
1416 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1417 let offset = position.to_offset(self);
1418 let mut languages: Vec<Arc<Language>> = self
1419 .syntax_map
1420 .lock()
1421 .layers_for_range(offset..offset, &self.text, false)
1422 .map(|info| info.language.clone())
1423 .collect();
1424
1425 if languages.is_empty()
1426 && let Some(buffer_language) = self.language() {
1427 languages.push(buffer_language.clone());
1428 }
1429
1430 languages
1431 }
1432
1433 /// An integer version number that accounts for all updates besides
1434 /// the buffer's text itself (which is versioned via a version vector).
1435 pub fn non_text_state_update_count(&self) -> usize {
1436 self.non_text_state_update_count
1437 }
1438
1439 /// Whether the buffer is being parsed in the background.
1440 #[cfg(any(test, feature = "test-support"))]
1441 pub fn is_parsing(&self) -> bool {
1442 self.reparse.is_some()
1443 }
1444
1445 /// Indicates whether the buffer contains any regions that may be
1446 /// written in a language that hasn't been loaded yet.
1447 pub fn contains_unknown_injections(&self) -> bool {
1448 self.syntax_map.lock().contains_unknown_injections()
1449 }
1450
1451 #[cfg(any(test, feature = "test-support"))]
1452 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1453 self.sync_parse_timeout = timeout;
1454 }
1455
1456 /// Called after an edit to synchronize the buffer's main parse tree with
1457 /// the buffer's new underlying state.
1458 ///
1459 /// Locks the syntax map and interpolates the edits since the last reparse
1460 /// into the foreground syntax tree.
1461 ///
1462 /// Then takes a stable snapshot of the syntax map before unlocking it.
1463 /// The snapshot with the interpolated edits is sent to a background thread,
1464 /// where we ask Tree-sitter to perform an incremental parse.
1465 ///
1466 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1467 /// waiting on the parse to complete. As soon as it completes, we proceed
1468 /// synchronously, unless a 1ms timeout elapses.
1469 ///
1470 /// If we time out waiting on the parse, we spawn a second task waiting
1471 /// until the parse does complete and return with the interpolated tree still
1472 /// in the foreground. When the background parse completes, call back into
1473 /// the main thread and assign the foreground parse state.
1474 ///
1475 /// If the buffer or grammar changed since the start of the background parse,
1476 /// initiate an additional reparse recursively. To avoid concurrent parses
1477 /// for the same buffer, we only initiate a new parse if we are not already
1478 /// parsing in the background.
1479 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1480 if self.reparse.is_some() {
1481 return;
1482 }
1483 let language = if let Some(language) = self.language.clone() {
1484 language
1485 } else {
1486 return;
1487 };
1488
1489 let text = self.text_snapshot();
1490 let parsed_version = self.version();
1491
1492 let mut syntax_map = self.syntax_map.lock();
1493 syntax_map.interpolate(&text);
1494 let language_registry = syntax_map.language_registry();
1495 let mut syntax_snapshot = syntax_map.snapshot();
1496 drop(syntax_map);
1497
1498 let parse_task = cx.background_spawn({
1499 let language = language.clone();
1500 let language_registry = language_registry.clone();
1501 async move {
1502 syntax_snapshot.reparse(&text, language_registry, language);
1503 syntax_snapshot
1504 }
1505 });
1506
1507 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1508 match cx
1509 .background_executor()
1510 .block_with_timeout(self.sync_parse_timeout, parse_task)
1511 {
1512 Ok(new_syntax_snapshot) => {
1513 self.did_finish_parsing(new_syntax_snapshot, cx);
1514 self.reparse = None;
1515 }
1516 Err(parse_task) => {
1517 self.reparse = Some(cx.spawn(async move |this, cx| {
1518 let new_syntax_map = parse_task.await;
1519 this.update(cx, move |this, cx| {
1520 let grammar_changed =
1521 this.language.as_ref().map_or(true, |current_language| {
1522 !Arc::ptr_eq(&language, current_language)
1523 });
1524 let language_registry_changed = new_syntax_map
1525 .contains_unknown_injections()
1526 && language_registry.map_or(false, |registry| {
1527 registry.version() != new_syntax_map.language_registry_version()
1528 });
1529 let parse_again = language_registry_changed
1530 || grammar_changed
1531 || this.version.changed_since(&parsed_version);
1532 this.did_finish_parsing(new_syntax_map, cx);
1533 this.reparse = None;
1534 if parse_again {
1535 this.reparse(cx);
1536 }
1537 })
1538 .ok();
1539 }));
1540 }
1541 }
1542 }
1543
1544 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1545 self.was_changed();
1546 self.non_text_state_update_count += 1;
1547 self.syntax_map.lock().did_parse(syntax_snapshot);
1548 self.request_autoindent(cx);
1549 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1550 cx.emit(BufferEvent::Reparsed);
1551 cx.notify();
1552 }
1553
1554 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1555 self.parse_status.1.clone()
1556 }
1557
1558 /// Assign to the buffer a set of diagnostics created by a given language server.
1559 pub fn update_diagnostics(
1560 &mut self,
1561 server_id: LanguageServerId,
1562 diagnostics: DiagnosticSet,
1563 cx: &mut Context<Self>,
1564 ) {
1565 let lamport_timestamp = self.text.lamport_clock.tick();
1566 let op = Operation::UpdateDiagnostics {
1567 server_id,
1568 diagnostics: diagnostics.iter().cloned().collect(),
1569 lamport_timestamp,
1570 };
1571
1572 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1573 self.send_operation(op, true, cx);
1574 }
1575
1576 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1577 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1578 return None;
1579 };
1580 Some(&self.diagnostics[idx].1)
1581 }
1582
1583 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1584 if let Some(indent_sizes) = self.compute_autoindents() {
1585 let indent_sizes = cx.background_spawn(indent_sizes);
1586 match cx
1587 .background_executor()
1588 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1589 {
1590 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1591 Err(indent_sizes) => {
1592 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1593 let indent_sizes = indent_sizes.await;
1594 this.update(cx, |this, cx| {
1595 this.apply_autoindents(indent_sizes, cx);
1596 })
1597 .ok();
1598 }));
1599 }
1600 }
1601 } else {
1602 self.autoindent_requests.clear();
1603 for tx in self.wait_for_autoindent_txs.drain(..) {
1604 tx.send(()).ok();
1605 }
1606 }
1607 }
1608
1609 fn compute_autoindents(
1610 &self,
1611 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1612 let max_rows_between_yields = 100;
1613 let snapshot = self.snapshot();
1614 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1615 return None;
1616 }
1617
1618 let autoindent_requests = self.autoindent_requests.clone();
1619 Some(async move {
1620 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1621 for request in autoindent_requests {
1622 // Resolve each edited range to its row in the current buffer and in the
1623 // buffer before this batch of edits.
1624 let mut row_ranges = Vec::new();
1625 let mut old_to_new_rows = BTreeMap::new();
1626 let mut language_indent_sizes_by_new_row = Vec::new();
1627 for entry in &request.entries {
1628 let position = entry.range.start;
1629 let new_row = position.to_point(&snapshot).row;
1630 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1631 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1632
1633 if !entry.first_line_is_new {
1634 let old_row = position.to_point(&request.before_edit).row;
1635 old_to_new_rows.insert(old_row, new_row);
1636 }
1637 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1638 }
1639
1640 // Build a map containing the suggested indentation for each of the edited lines
1641 // with respect to the state of the buffer before these edits. This map is keyed
1642 // by the rows for these lines in the current state of the buffer.
1643 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1644 let old_edited_ranges =
1645 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1646 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1647 let mut language_indent_size = IndentSize::default();
1648 for old_edited_range in old_edited_ranges {
1649 let suggestions = request
1650 .before_edit
1651 .suggest_autoindents(old_edited_range.clone())
1652 .into_iter()
1653 .flatten();
1654 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1655 if let Some(suggestion) = suggestion {
1656 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1657
1658 // Find the indent size based on the language for this row.
1659 while let Some((row, size)) = language_indent_sizes.peek() {
1660 if *row > new_row {
1661 break;
1662 }
1663 language_indent_size = *size;
1664 language_indent_sizes.next();
1665 }
1666
1667 let suggested_indent = old_to_new_rows
1668 .get(&suggestion.basis_row)
1669 .and_then(|from_row| {
1670 Some(old_suggestions.get(from_row).copied()?.0)
1671 })
1672 .unwrap_or_else(|| {
1673 request
1674 .before_edit
1675 .indent_size_for_line(suggestion.basis_row)
1676 })
1677 .with_delta(suggestion.delta, language_indent_size);
1678 old_suggestions
1679 .insert(new_row, (suggested_indent, suggestion.within_error));
1680 }
1681 }
1682 yield_now().await;
1683 }
1684
1685 // Compute new suggestions for each line, but only include them in the result
1686 // if they differ from the old suggestion for that line.
1687 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1688 let mut language_indent_size = IndentSize::default();
1689 for (row_range, original_indent_column) in row_ranges {
1690 let new_edited_row_range = if request.is_block_mode {
1691 row_range.start..row_range.start + 1
1692 } else {
1693 row_range.clone()
1694 };
1695
1696 let suggestions = snapshot
1697 .suggest_autoindents(new_edited_row_range.clone())
1698 .into_iter()
1699 .flatten();
1700 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1701 if let Some(suggestion) = suggestion {
1702 // Find the indent size based on the language for this row.
1703 while let Some((row, size)) = language_indent_sizes.peek() {
1704 if *row > new_row {
1705 break;
1706 }
1707 language_indent_size = *size;
1708 language_indent_sizes.next();
1709 }
1710
1711 let suggested_indent = indent_sizes
1712 .get(&suggestion.basis_row)
1713 .copied()
1714 .map(|e| e.0)
1715 .unwrap_or_else(|| {
1716 snapshot.indent_size_for_line(suggestion.basis_row)
1717 })
1718 .with_delta(suggestion.delta, language_indent_size);
1719
1720 if old_suggestions.get(&new_row).map_or(
1721 true,
1722 |(old_indentation, was_within_error)| {
1723 suggested_indent != *old_indentation
1724 && (!suggestion.within_error || *was_within_error)
1725 },
1726 ) {
1727 indent_sizes.insert(
1728 new_row,
1729 (suggested_indent, request.ignore_empty_lines),
1730 );
1731 }
1732 }
1733 }
1734
1735 if let (true, Some(original_indent_column)) =
1736 (request.is_block_mode, original_indent_column)
1737 {
1738 let new_indent =
1739 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1740 *indent
1741 } else {
1742 snapshot.indent_size_for_line(row_range.start)
1743 };
1744 let delta = new_indent.len as i64 - original_indent_column as i64;
1745 if delta != 0 {
1746 for row in row_range.skip(1) {
1747 indent_sizes.entry(row).or_insert_with(|| {
1748 let mut size = snapshot.indent_size_for_line(row);
1749 if size.kind == new_indent.kind {
1750 match delta.cmp(&0) {
1751 Ordering::Greater => size.len += delta as u32,
1752 Ordering::Less => {
1753 size.len = size.len.saturating_sub(-delta as u32)
1754 }
1755 Ordering::Equal => {}
1756 }
1757 }
1758 (size, request.ignore_empty_lines)
1759 });
1760 }
1761 }
1762 }
1763
1764 yield_now().await;
1765 }
1766 }
1767
1768 indent_sizes
1769 .into_iter()
1770 .filter_map(|(row, (indent, ignore_empty_lines))| {
1771 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1772 None
1773 } else {
1774 Some((row, indent))
1775 }
1776 })
1777 .collect()
1778 })
1779 }
1780
1781 fn apply_autoindents(
1782 &mut self,
1783 indent_sizes: BTreeMap<u32, IndentSize>,
1784 cx: &mut Context<Self>,
1785 ) {
1786 self.autoindent_requests.clear();
1787 for tx in self.wait_for_autoindent_txs.drain(..) {
1788 tx.send(()).ok();
1789 }
1790
1791 let edits: Vec<_> = indent_sizes
1792 .into_iter()
1793 .filter_map(|(row, indent_size)| {
1794 let current_size = indent_size_for_line(self, row);
1795 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1796 })
1797 .collect();
1798
1799 let preserve_preview = self.preserve_preview();
1800 self.edit(edits, None, cx);
1801 if preserve_preview {
1802 self.refresh_preview();
1803 }
1804 }
1805
1806 /// Create a minimal edit that will cause the given row to be indented
1807 /// with the given size. After applying this edit, the length of the line
1808 /// will always be at least `new_size.len`.
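///
/// A minimal sketch of the returned edit (the values are illustrative, and the
/// constructors come from this crate):
///
/// ```ignore
/// // Re-indent row 2 from two spaces to four: the edit inserts the two
/// // missing spaces at the start of that line.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     2,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string()))
/// );
/// ```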
1809 pub fn edit_for_indent_size_adjustment(
1810 row: u32,
1811 current_size: IndentSize,
1812 new_size: IndentSize,
1813 ) -> Option<(Range<Point>, String)> {
1814 if new_size.kind == current_size.kind {
1815 match new_size.len.cmp(&current_size.len) {
1816 Ordering::Greater => {
1817 let point = Point::new(row, 0);
1818 Some((
1819 point..point,
1820 iter::repeat(new_size.char())
1821 .take((new_size.len - current_size.len) as usize)
1822 .collect::<String>(),
1823 ))
1824 }
1825
1826 Ordering::Less => Some((
1827 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1828 String::new(),
1829 )),
1830
1831 Ordering::Equal => None,
1832 }
1833 } else {
1834 Some((
1835 Point::new(row, 0)..Point::new(row, current_size.len),
1836 iter::repeat(new_size.char())
1837 .take(new_size.len as usize)
1838 .collect::<String>(),
1839 ))
1840 }
1841 }
1842
1843 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1844 /// and the given new text.
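///
/// A rough sketch of using the returned task, assuming an async gpui context
/// and an `Entity<Buffer>` named `buffer` (both are assumptions here); applying
/// the resulting `Diff` preserves anchors better than replacing the whole text:
///
/// ```ignore
/// let diff = buffer
///     .update(cx, |buffer, cx| buffer.diff(new_text, cx))?
///     .await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// })?;
/// ```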
1845 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1846 let old_text = self.as_rope().clone();
1847 let base_version = self.version();
1848 cx.background_executor()
1849 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1850 let old_text = old_text.to_string();
1851 let line_ending = LineEnding::detect(&new_text);
1852 LineEnding::normalize(&mut new_text);
1853 let edits = text_diff(&old_text, &new_text);
1854 Diff {
1855 base_version,
1856 line_ending,
1857 edits,
1858 }
1859 })
1860 }
1861
1862 /// Spawns a background task that searches the buffer for any whitespace
1863 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1864 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1865 let old_text = self.as_rope().clone();
1866 let line_ending = self.line_ending();
1867 let base_version = self.version();
1868 cx.background_spawn(async move {
1869 let ranges = trailing_whitespace_ranges(&old_text);
1870 let empty = Arc::<str>::from("");
1871 Diff {
1872 base_version,
1873 line_ending,
1874 edits: ranges
1875 .into_iter()
1876 .map(|range| (range, empty.clone()))
1877 .collect(),
1878 }
1879 })
1880 }
1881
1882 /// Ensures that the buffer ends with a single newline character, and
1883 /// no other whitespace. Skips if the buffer is empty.
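///
/// Illustrative only: given a buffer containing `"fn main() {}\n\n\n"`, this
/// collapses the trailing blank lines into a single final newline, while a
/// buffer that already ends in exactly one newline is left untouched.
///
/// ```ignore
/// buffer.ensure_final_newline(cx);
/// assert_eq!(buffer.text(), "fn main() {}\n");
/// ```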
1884 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1885 let len = self.len();
1886 if len == 0 {
1887 return;
1888 }
1889 let mut offset = len;
1890 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1891 let non_whitespace_len = chunk
1892 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1893 .len();
1894 offset -= chunk.len();
1895 offset += non_whitespace_len;
1896 if non_whitespace_len != 0 {
1897 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1898 return;
1899 }
1900 break;
1901 }
1902 }
1903 self.edit([(offset..len, "\n")], None, cx);
1904 }
1905
1906 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1907 /// calculated, then the diff is adjusted to account for those changes, and any parts
1908 /// of the diff that conflict with those changes are discarded.
1909 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1910 let snapshot = self.snapshot();
1911 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1912 let mut delta = 0;
1913 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1914 while let Some(edit_since) = edits_since.peek() {
1915 // If the edit occurs after a diff hunk, then it does not
1916 // affect that hunk.
1917 if edit_since.old.start > range.end {
1918 break;
1919 }
1920 // If the edit precedes the diff hunk, then adjust the hunk
1921 // to reflect the edit.
1922 else if edit_since.old.end < range.start {
1923 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1924 edits_since.next();
1925 }
1926 // If the edit intersects a diff hunk, then discard that hunk.
1927 else {
1928 return None;
1929 }
1930 }
1931
1932 let start = (range.start as i64 + delta) as usize;
1933 let end = (range.end as i64 + delta) as usize;
1934 Some((start..end, new_text))
1935 });
1936
1937 self.start_transaction();
1938 self.text.set_line_ending(diff.line_ending);
1939 self.edit(adjusted_edits, None, cx);
1940 self.end_transaction(cx)
1941 }
1942
1943 fn has_unsaved_edits(&self) -> bool {
1944 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1945
1946 if last_version == self.version {
1947 self.has_unsaved_edits
1948 .set((last_version, has_unsaved_edits));
1949 return has_unsaved_edits;
1950 }
1951
1952 let has_edits = self.has_edits_since(&self.saved_version);
1953 self.has_unsaved_edits
1954 .set((self.version.clone(), has_edits));
1955 has_edits
1956 }
1957
1958 /// Checks if the buffer has unsaved changes.
1959 pub fn is_dirty(&self) -> bool {
1960 if self.capability == Capability::ReadOnly {
1961 return false;
1962 }
1963 if self.has_conflict {
1964 return true;
1965 }
1966 match self.file.as_ref().map(|f| f.disk_state()) {
1967 Some(DiskState::New) | Some(DiskState::Deleted) => {
1968 !self.is_empty() && self.has_unsaved_edits()
1969 }
1970 _ => self.has_unsaved_edits(),
1971 }
1972 }
1973
1974 /// Checks if the buffer and its file have both changed since the buffer
1975 /// was last saved or reloaded.
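///
/// A hedged sketch of how this pairs with [`Buffer::is_dirty`] before saving;
/// the prompt logic is hypothetical:
///
/// ```ignore
/// if buffer.has_conflict() {
///     // The file changed on disk while the buffer has unsaved edits; ask the
///     // user before overwriting either version.
/// } else if buffer.is_dirty() {
///     // Safe to save without prompting.
/// }
/// ```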
1976 pub fn has_conflict(&self) -> bool {
1977 if self.has_conflict {
1978 return true;
1979 }
1980 let Some(file) = self.file.as_ref() else {
1981 return false;
1982 };
1983 match file.disk_state() {
1984 DiskState::New => false,
1985 DiskState::Present { mtime } => match self.saved_mtime {
1986 Some(saved_mtime) => {
1987 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1988 }
1989 None => true,
1990 },
1991 DiskState::Deleted => false,
1992 }
1993 }
1994
1995 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1996 pub fn subscribe(&mut self) -> Subscription {
1997 self.text.subscribe()
1998 }
1999
2000 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2001 ///
2002 /// This allows downstream code to check if the buffer's text has changed without
2003 /// waiting for an effect cycle, which would be required if using events.
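///
/// A minimal sketch: the buffer holds only a weak reference, so the caller
/// keeps the `Rc` alive and checks (and resets) the flag whenever convenient.
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
///
/// buffer.edit([(0..0, "hello")], None, cx);
/// assert!(changed.get());
/// changed.set(false); // Reset and keep polling.
/// ```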
2004 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2005 if let Err(ix) = self
2006 .change_bits
2007 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2008 {
2009 self.change_bits.insert(ix, bit);
2010 }
2011 }
2012
2013 fn was_changed(&mut self) {
2014 self.change_bits.retain(|change_bit| {
2015 change_bit.upgrade().map_or(false, |bit| {
2016 bit.replace(true);
2017 true
2018 })
2019 });
2020 }
2021
2022 /// Starts a transaction, if one is not already in-progress. When undoing or
2023 /// redoing edits, all of the edits performed within a transaction are undone
2024 /// or redone together.
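///
/// A sketch of grouping several edits into a single undo step (the edit
/// contents are illustrative):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "// header\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
/// let transaction_id = buffer.end_transaction(cx);
/// // A single undo now reverts both edits together.
/// ```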
2025 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2026 self.start_transaction_at(Instant::now())
2027 }
2028
2029 /// Starts a transaction, providing the current time. Subsequent transactions
2030 /// that occur within a short period of time will be grouped together. This
2031 /// is controlled by the buffer's undo grouping duration.
2032 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2033 self.transaction_depth += 1;
2034 if self.was_dirty_before_starting_transaction.is_none() {
2035 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2036 }
2037 self.text.start_transaction_at(now)
2038 }
2039
2040 /// Terminates the current transaction, if this is the outermost transaction.
2041 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2042 self.end_transaction_at(Instant::now(), cx)
2043 }
2044
2045 /// Terminates the current transaction, providing the current time. Subsequent transactions
2046 /// that occur within a short period of time will be grouped together. This
2047 /// is controlled by the buffer's undo grouping duration.
2048 pub fn end_transaction_at(
2049 &mut self,
2050 now: Instant,
2051 cx: &mut Context<Self>,
2052 ) -> Option<TransactionId> {
2053 assert!(self.transaction_depth > 0);
2054 self.transaction_depth -= 1;
2055 let was_dirty = if self.transaction_depth == 0 {
2056 self.was_dirty_before_starting_transaction.take().unwrap()
2057 } else {
2058 false
2059 };
2060 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2061 self.did_edit(&start_version, was_dirty, cx);
2062 Some(transaction_id)
2063 } else {
2064 None
2065 }
2066 }
2067
2068 /// Manually add a transaction to the buffer's undo history.
2069 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2070 self.text.push_transaction(transaction, now);
2071 }
2072
2073 /// Differs from `push_transaction` in that it does not clear the redo
2074 /// stack. Intended to be used to create a parent transaction to merge
2075 /// potential child transactions into.
2076 ///
2077 /// The caller is responsible for removing it from the undo history using
2078 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2079 /// are merged into this transaction, the caller is responsible for ensuring
2080 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2081 /// cleared is to create transactions with the usual `start_transaction` and
2082 /// `end_transaction` methods and merge the resulting transactions into
2083 /// the transaction created by this method.
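///
/// A sketch of the usage described above; the edit itself is illustrative:
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
///
/// buffer.start_transaction();
/// buffer.edit([(0..0, "child edit")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     // Nothing was merged in, so drop the placeholder transaction again.
///     buffer.forget_transaction(parent);
/// }
/// ```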
2084 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2085 self.text.push_empty_transaction(now)
2086 }
2087
2088 /// Prevent the last transaction from being grouped with any subsequent transactions,
2089 /// even if they occur within the buffer's undo grouping duration.
2090 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2091 self.text.finalize_last_transaction()
2092 }
2093
2094 /// Manually group all changes since a given transaction.
2095 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2096 self.text.group_until_transaction(transaction_id);
2097 }
2098
2099 /// Manually remove a transaction from the buffer's undo history.
2100 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2101 self.text.forget_transaction(transaction_id)
2102 }
2103
2104 /// Retrieve a transaction from the buffer's undo history.
2105 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2106 self.text.get_transaction(transaction_id)
2107 }
2108
2109 /// Manually merge two transactions in the buffer's undo history.
2110 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2111 self.text.merge_transactions(transaction, destination);
2112 }
2113
2114 /// Waits for the buffer to receive operations with the given timestamps.
2115 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2116 &mut self,
2117 edit_ids: It,
2118 ) -> impl Future<Output = Result<()>> + use<It> {
2119 self.text.wait_for_edits(edit_ids)
2120 }
2121
2122 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2123 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2124 &mut self,
2125 anchors: It,
2126 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2127 self.text.wait_for_anchors(anchors)
2128 }
2129
2130 /// Waits for the buffer to receive operations up to the given version.
2131 pub fn wait_for_version(
2132 &mut self,
2133 version: clock::Global,
2134 ) -> impl Future<Output = Result<()>> + use<> {
2135 self.text.wait_for_version(version)
2136 }
2137
2138 /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2139 /// [`Buffer::wait_for_version`] to resolve with an error.
2140 pub fn give_up_waiting(&mut self) {
2141 self.text.give_up_waiting();
2142 }
2143
2144 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2145 let mut rx = None;
2146 if !self.autoindent_requests.is_empty() {
2147 let channel = oneshot::channel();
2148 self.wait_for_autoindent_txs.push(channel.0);
2149 rx = Some(channel.1);
2150 }
2151 rx
2152 }
2153
2154 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2155 pub fn set_active_selections(
2156 &mut self,
2157 selections: Arc<[Selection<Anchor>]>,
2158 line_mode: bool,
2159 cursor_shape: CursorShape,
2160 cx: &mut Context<Self>,
2161 ) {
2162 let lamport_timestamp = self.text.lamport_clock.tick();
2163 self.remote_selections.insert(
2164 self.text.replica_id(),
2165 SelectionSet {
2166 selections: selections.clone(),
2167 lamport_timestamp,
2168 line_mode,
2169 cursor_shape,
2170 },
2171 );
2172 self.send_operation(
2173 Operation::UpdateSelections {
2174 selections,
2175 line_mode,
2176 lamport_timestamp,
2177 cursor_shape,
2178 },
2179 true,
2180 cx,
2181 );
2182 self.non_text_state_update_count += 1;
2183 cx.notify();
2184 }
2185
2186 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2187 /// this replica.
2188 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2189 if self
2190 .remote_selections
2191 .get(&self.text.replica_id())
2192 .map_or(true, |set| !set.selections.is_empty())
2193 {
2194 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2195 }
2196 }
2197
2198 pub fn set_agent_selections(
2199 &mut self,
2200 selections: Arc<[Selection<Anchor>]>,
2201 line_mode: bool,
2202 cursor_shape: CursorShape,
2203 cx: &mut Context<Self>,
2204 ) {
2205 let lamport_timestamp = self.text.lamport_clock.tick();
2206 self.remote_selections.insert(
2207 AGENT_REPLICA_ID,
2208 SelectionSet {
2209 selections: selections.clone(),
2210 lamport_timestamp,
2211 line_mode,
2212 cursor_shape,
2213 },
2214 );
2215 self.non_text_state_update_count += 1;
2216 cx.notify();
2217 }
2218
2219 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2220 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2221 }
2222
2223 /// Replaces the buffer's entire text.
2224 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2225 where
2226 T: Into<Arc<str>>,
2227 {
2228 self.autoindent_requests.clear();
2229 self.edit([(0..self.len(), text)], None, cx)
2230 }
2231
2232 /// Appends the given text to the end of the buffer.
2233 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2234 where
2235 T: Into<Arc<str>>,
2236 {
2237 self.edit([(self.len()..self.len(), text)], None, cx)
2238 }
2239
2240 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2241 /// delete, and a string of text to insert at that location.
2242 ///
2243 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2244 /// request for the edited ranges, which will be processed when the buffer finishes
2245 /// parsing.
2246 ///
2247 /// Parsing takes place at the end of a transaction, and may happen synchronously
2248 /// or asynchronously, depending on the changes.
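///
/// A sketch of a plain edit and an auto-indented edit; the offsets and text
/// are illustrative, and all ranges refer to the buffer *before* the edit:
///
/// ```ignore
/// // Replace the first three bytes and append to the end in a single call.
/// buffer.edit(
///     [(0..3, "let"), (buffer.len()..buffer.len(), ";\n")],
///     None,
///     cx,
/// );
///
/// // Insert a line and let the language's indent rules place it.
/// // `position` is a hypothetical offset or `Point` within the buffer.
/// buffer.edit(
///     [(position..position, "\nbar()")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```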
2249 pub fn edit<I, S, T>(
2250 &mut self,
2251 edits_iter: I,
2252 autoindent_mode: Option<AutoindentMode>,
2253 cx: &mut Context<Self>,
2254 ) -> Option<clock::Lamport>
2255 where
2256 I: IntoIterator<Item = (Range<S>, T)>,
2257 S: ToOffset,
2258 T: Into<Arc<str>>,
2259 {
2260 // Skip invalid edits and coalesce contiguous ones.
2261 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2262
2263 for (range, new_text) in edits_iter {
2264 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2265
2266 if range.start > range.end {
2267 mem::swap(&mut range.start, &mut range.end);
2268 }
2269 let new_text = new_text.into();
2270 if !new_text.is_empty() || !range.is_empty() {
2271 if let Some((prev_range, prev_text)) = edits.last_mut()
2272 && prev_range.end >= range.start
2273 {
2274 prev_range.end = cmp::max(prev_range.end, range.end);
2275 *prev_text = format!("{prev_text}{new_text}").into();
2276 } else {
2277 edits.push((range, new_text));
2278 }
2279 }
2280 }
2281 if edits.is_empty() {
2282 return None;
2283 }
2284
2285 self.start_transaction();
2286 self.pending_autoindent.take();
2287 let autoindent_request = autoindent_mode
2288 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2289
2290 let edit_operation = self.text.edit(edits.iter().cloned());
2291 let edit_id = edit_operation.timestamp();
2292
2293 if let Some((before_edit, mode)) = autoindent_request {
2294 let mut delta = 0isize;
2295 let mut previous_setting = None;
2296 let entries: Vec<_> = edits
2297 .into_iter()
2298 .enumerate()
2299 .zip(&edit_operation.as_edit().unwrap().new_text)
2300 .filter(|((_, (range, _)), _)| {
2301 let language = before_edit.language_at(range.start);
2302 let language_id = language.map(|l| l.id());
2303 if let Some((cached_language_id, auto_indent)) = previous_setting
2304 && cached_language_id == language_id
2305 {
2306 auto_indent
2307 } else {
2308 // The auto-indent setting is not present in editorconfigs, hence
2309 // we can avoid passing the file here.
2310 let auto_indent =
2311 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2312 previous_setting = Some((language_id, auto_indent));
2313 auto_indent
2314 }
2315 })
2316 .map(|((ix, (range, _)), new_text)| {
2317 let new_text_length = new_text.len();
2318 let old_start = range.start.to_point(&before_edit);
2319 let new_start = (delta + range.start as isize) as usize;
2320 let range_len = range.end - range.start;
2321 delta += new_text_length as isize - range_len as isize;
2322
2323 // Decide what range of the insertion to auto-indent, and whether
2324 // the first line of the insertion should be considered a newly-inserted line
2325 // or an edit to an existing line.
2326 let mut range_of_insertion_to_indent = 0..new_text_length;
2327 let mut first_line_is_new = true;
2328
2329 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2330 let old_line_end = before_edit.line_len(old_start.row);
2331
2332 if old_start.column > old_line_start {
2333 first_line_is_new = false;
2334 }
2335
2336 if !new_text.contains('\n')
2337 && (old_start.column + (range_len as u32) < old_line_end
2338 || old_line_end == old_line_start)
2339 {
2340 first_line_is_new = false;
2341 }
2342
2343 // When inserting text starting with a newline, avoid auto-indenting the
2344 // previous line.
2345 if new_text.starts_with('\n') {
2346 range_of_insertion_to_indent.start += 1;
2347 first_line_is_new = true;
2348 }
2349
2350 let mut original_indent_column = None;
2351 if let AutoindentMode::Block {
2352 original_indent_columns,
2353 } = &mode
2354 {
2355 original_indent_column = Some(if new_text.starts_with('\n') {
2356 indent_size_for_text(
2357 new_text[range_of_insertion_to_indent.clone()].chars(),
2358 )
2359 .len
2360 } else {
2361 original_indent_columns
2362 .get(ix)
2363 .copied()
2364 .flatten()
2365 .unwrap_or_else(|| {
2366 indent_size_for_text(
2367 new_text[range_of_insertion_to_indent.clone()].chars(),
2368 )
2369 .len
2370 })
2371 });
2372
2373 // Avoid auto-indenting the line after the edit.
2374 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2375 range_of_insertion_to_indent.end -= 1;
2376 }
2377 }
2378
2379 AutoindentRequestEntry {
2380 first_line_is_new,
2381 original_indent_column,
2382 indent_size: before_edit.language_indent_size_at(range.start, cx),
2383 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2384 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2385 }
2386 })
2387 .collect();
2388
2389 if !entries.is_empty() {
2390 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2391 before_edit,
2392 entries,
2393 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2394 ignore_empty_lines: false,
2395 }));
2396 }
2397 }
2398
2399 self.end_transaction(cx);
2400 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2401 Some(edit_id)
2402 }
2403
2404 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2405 self.was_changed();
2406
2407 if self.edits_since::<usize>(old_version).next().is_none() {
2408 return;
2409 }
2410
2411 self.reparse(cx);
2412 cx.emit(BufferEvent::Edited);
2413 if was_dirty != self.is_dirty() {
2414 cx.emit(BufferEvent::DirtyChanged);
2415 }
2416 cx.notify();
2417 }
2418
2419 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2420 where
2421 I: IntoIterator<Item = Range<T>>,
2422 T: ToOffset + Copy,
2423 {
2424 let before_edit = self.snapshot();
2425 let entries = ranges
2426 .into_iter()
2427 .map(|range| AutoindentRequestEntry {
2428 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2429 first_line_is_new: true,
2430 indent_size: before_edit.language_indent_size_at(range.start, cx),
2431 original_indent_column: None,
2432 })
2433 .collect();
2434 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2435 before_edit,
2436 entries,
2437 is_block_mode: false,
2438 ignore_empty_lines: true,
2439 }));
2440 self.request_autoindent(cx);
2441 }
2442
2443 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2444 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
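///
/// A sketch; `position` stands for any value implementing `ToPoint`, such as a `Point`:
///
/// ```ignore
/// // Open an empty, auto-indented line after `position`, padded by a blank
/// // line on each side wherever one does not already exist.
/// let start = buffer.insert_empty_line(position, true, true, cx);
/// ```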
2445 pub fn insert_empty_line(
2446 &mut self,
2447 position: impl ToPoint,
2448 space_above: bool,
2449 space_below: bool,
2450 cx: &mut Context<Self>,
2451 ) -> Point {
2452 let mut position = position.to_point(self);
2453
2454 self.start_transaction();
2455
2456 self.edit(
2457 [(position..position, "\n")],
2458 Some(AutoindentMode::EachLine),
2459 cx,
2460 );
2461
2462 if position.column > 0 {
2463 position += Point::new(1, 0);
2464 }
2465
2466 if !self.is_line_blank(position.row) {
2467 self.edit(
2468 [(position..position, "\n")],
2469 Some(AutoindentMode::EachLine),
2470 cx,
2471 );
2472 }
2473
2474 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2475 self.edit(
2476 [(position..position, "\n")],
2477 Some(AutoindentMode::EachLine),
2478 cx,
2479 );
2480 position.row += 1;
2481 }
2482
2483 if space_below
2484 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2485 {
2486 self.edit(
2487 [(position..position, "\n")],
2488 Some(AutoindentMode::EachLine),
2489 cx,
2490 );
2491 }
2492
2493 self.end_transaction(cx);
2494
2495 position
2496 }
2497
2498 /// Applies the given remote operations to the buffer.
2499 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2500 self.pending_autoindent.take();
2501 let was_dirty = self.is_dirty();
2502 let old_version = self.version.clone();
2503 let mut deferred_ops = Vec::new();
2504 let buffer_ops = ops
2505 .into_iter()
2506 .filter_map(|op| match op {
2507 Operation::Buffer(op) => Some(op),
2508 _ => {
2509 if self.can_apply_op(&op) {
2510 self.apply_op(op, cx);
2511 } else {
2512 deferred_ops.push(op);
2513 }
2514 None
2515 }
2516 })
2517 .collect::<Vec<_>>();
2518 for operation in buffer_ops.iter() {
2519 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2520 }
2521 self.text.apply_ops(buffer_ops);
2522 self.deferred_ops.insert(deferred_ops);
2523 self.flush_deferred_ops(cx);
2524 self.did_edit(&old_version, was_dirty, cx);
2525 // Notify independently of whether the buffer was edited as the operations could include a
2526 // selection update.
2527 cx.notify();
2528 }
2529
2530 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2531 let mut deferred_ops = Vec::new();
2532 for op in self.deferred_ops.drain().iter().cloned() {
2533 if self.can_apply_op(&op) {
2534 self.apply_op(op, cx);
2535 } else {
2536 deferred_ops.push(op);
2537 }
2538 }
2539 self.deferred_ops.insert(deferred_ops);
2540 }
2541
2542 pub fn has_deferred_ops(&self) -> bool {
2543 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2544 }
2545
2546 fn can_apply_op(&self, operation: &Operation) -> bool {
2547 match operation {
2548 Operation::Buffer(_) => {
2549 unreachable!("buffer operations should never be applied at this layer")
2550 }
2551 Operation::UpdateDiagnostics {
2552 diagnostics: diagnostic_set,
2553 ..
2554 } => diagnostic_set.iter().all(|diagnostic| {
2555 self.text.can_resolve(&diagnostic.range.start)
2556 && self.text.can_resolve(&diagnostic.range.end)
2557 }),
2558 Operation::UpdateSelections { selections, .. } => selections
2559 .iter()
2560 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2561 Operation::UpdateCompletionTriggers { .. } => true,
2562 }
2563 }
2564
2565 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2566 match operation {
2567 Operation::Buffer(_) => {
2568 unreachable!("buffer operations should never be applied at this layer")
2569 }
2570 Operation::UpdateDiagnostics {
2571 server_id,
2572 diagnostics: diagnostic_set,
2573 lamport_timestamp,
2574 } => {
2575 let snapshot = self.snapshot();
2576 self.apply_diagnostic_update(
2577 server_id,
2578 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2579 lamport_timestamp,
2580 cx,
2581 );
2582 }
2583 Operation::UpdateSelections {
2584 selections,
2585 lamport_timestamp,
2586 line_mode,
2587 cursor_shape,
2588 } => {
2589 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2590 && set.lamport_timestamp > lamport_timestamp {
2591 return;
2592 }
2593
2594 self.remote_selections.insert(
2595 lamport_timestamp.replica_id,
2596 SelectionSet {
2597 selections,
2598 lamport_timestamp,
2599 line_mode,
2600 cursor_shape,
2601 },
2602 );
2603 self.text.lamport_clock.observe(lamport_timestamp);
2604 self.non_text_state_update_count += 1;
2605 }
2606 Operation::UpdateCompletionTriggers {
2607 triggers,
2608 lamport_timestamp,
2609 server_id,
2610 } => {
2611 if triggers.is_empty() {
2612 self.completion_triggers_per_language_server
2613 .remove(&server_id);
2614 self.completion_triggers = self
2615 .completion_triggers_per_language_server
2616 .values()
2617 .flat_map(|triggers| triggers.into_iter().cloned())
2618 .collect();
2619 } else {
2620 self.completion_triggers_per_language_server
2621 .insert(server_id, triggers.iter().cloned().collect());
2622 self.completion_triggers.extend(triggers);
2623 }
2624 self.text.lamport_clock.observe(lamport_timestamp);
2625 }
2626 }
2627 }
2628
2629 fn apply_diagnostic_update(
2630 &mut self,
2631 server_id: LanguageServerId,
2632 diagnostics: DiagnosticSet,
2633 lamport_timestamp: clock::Lamport,
2634 cx: &mut Context<Self>,
2635 ) {
2636 if lamport_timestamp > self.diagnostics_timestamp {
2637 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2638 if diagnostics.is_empty() {
2639 if let Ok(ix) = ix {
2640 self.diagnostics.remove(ix);
2641 }
2642 } else {
2643 match ix {
2644 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2645 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2646 };
2647 }
2648 self.diagnostics_timestamp = lamport_timestamp;
2649 self.non_text_state_update_count += 1;
2650 self.text.lamport_clock.observe(lamport_timestamp);
2651 cx.notify();
2652 cx.emit(BufferEvent::DiagnosticsUpdated);
2653 }
2654 }
2655
2656 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2657 self.was_changed();
2658 cx.emit(BufferEvent::Operation {
2659 operation,
2660 is_local,
2661 });
2662 }
2663
2664 /// Removes the selections for a given peer.
2665 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2666 self.remote_selections.remove(&replica_id);
2667 cx.notify();
2668 }
2669
2670 /// Undoes the most recent transaction.
2671 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2672 let was_dirty = self.is_dirty();
2673 let old_version = self.version.clone();
2674
2675 if let Some((transaction_id, operation)) = self.text.undo() {
2676 self.send_operation(Operation::Buffer(operation), true, cx);
2677 self.did_edit(&old_version, was_dirty, cx);
2678 Some(transaction_id)
2679 } else {
2680 None
2681 }
2682 }
2683
2684 /// Manually undoes a specific transaction in the buffer's undo history.
2685 pub fn undo_transaction(
2686 &mut self,
2687 transaction_id: TransactionId,
2688 cx: &mut Context<Self>,
2689 ) -> bool {
2690 let was_dirty = self.is_dirty();
2691 let old_version = self.version.clone();
2692 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2693 self.send_operation(Operation::Buffer(operation), true, cx);
2694 self.did_edit(&old_version, was_dirty, cx);
2695 true
2696 } else {
2697 false
2698 }
2699 }
2700
2701 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2702 pub fn undo_to_transaction(
2703 &mut self,
2704 transaction_id: TransactionId,
2705 cx: &mut Context<Self>,
2706 ) -> bool {
2707 let was_dirty = self.is_dirty();
2708 let old_version = self.version.clone();
2709
2710 let operations = self.text.undo_to_transaction(transaction_id);
2711 let undone = !operations.is_empty();
2712 for operation in operations {
2713 self.send_operation(Operation::Buffer(operation), true, cx);
2714 }
2715 if undone {
2716 self.did_edit(&old_version, was_dirty, cx)
2717 }
2718 undone
2719 }
2720
2721 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2722 let was_dirty = self.is_dirty();
2723 let operation = self.text.undo_operations(counts);
2724 let old_version = self.version.clone();
2725 self.send_operation(Operation::Buffer(operation), true, cx);
2726 self.did_edit(&old_version, was_dirty, cx);
2727 }
2728
2729 /// Redoes the most recent transaction.
2730 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2731 let was_dirty = self.is_dirty();
2732 let old_version = self.version.clone();
2733
2734 if let Some((transaction_id, operation)) = self.text.redo() {
2735 self.send_operation(Operation::Buffer(operation), true, cx);
2736 self.did_edit(&old_version, was_dirty, cx);
2737 Some(transaction_id)
2738 } else {
2739 None
2740 }
2741 }
2742
2743 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2744 pub fn redo_to_transaction(
2745 &mut self,
2746 transaction_id: TransactionId,
2747 cx: &mut Context<Self>,
2748 ) -> bool {
2749 let was_dirty = self.is_dirty();
2750 let old_version = self.version.clone();
2751
2752 let operations = self.text.redo_to_transaction(transaction_id);
2753 let redone = !operations.is_empty();
2754 for operation in operations {
2755 self.send_operation(Operation::Buffer(operation), true, cx);
2756 }
2757 if redone {
2758 self.did_edit(&old_version, was_dirty, cx)
2759 }
2760 redone
2761 }
2762
2763 /// Override current completion triggers with the user-provided completion triggers.
2764 pub fn set_completion_triggers(
2765 &mut self,
2766 server_id: LanguageServerId,
2767 triggers: BTreeSet<String>,
2768 cx: &mut Context<Self>,
2769 ) {
2770 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2771 if triggers.is_empty() {
2772 self.completion_triggers_per_language_server
2773 .remove(&server_id);
2774 self.completion_triggers = self
2775 .completion_triggers_per_language_server
2776 .values()
2777 .flat_map(|triggers| triggers.into_iter().cloned())
2778 .collect();
2779 } else {
2780 self.completion_triggers_per_language_server
2781 .insert(server_id, triggers.clone());
2782 self.completion_triggers.extend(triggers.iter().cloned());
2783 }
2784 self.send_operation(
2785 Operation::UpdateCompletionTriggers {
2786 triggers: triggers.into_iter().collect(),
2787 lamport_timestamp: self.completion_triggers_timestamp,
2788 server_id,
2789 },
2790 true,
2791 cx,
2792 );
2793 cx.notify();
2794 }
2795
2796 /// Returns a list of strings which trigger a completion menu for this language.
2797 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2798 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2799 &self.completion_triggers
2800 }
2801
2802 /// Call this directly after performing edits to prevent the preview tab
2803 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2804 /// to return false until there are additional edits.
2805 pub fn refresh_preview(&mut self) {
2806 self.preview_version = self.version.clone();
2807 }
2808
2809 /// Whether we should preserve the preview status of a tab containing this buffer.
2810 pub fn preserve_preview(&self) -> bool {
2811 !self.has_edits_since(&self.preview_version)
2812 }
2813}
2814
2815#[doc(hidden)]
2816#[cfg(any(test, feature = "test-support"))]
2817impl Buffer {
2818 pub fn edit_via_marked_text(
2819 &mut self,
2820 marked_string: &str,
2821 autoindent_mode: Option<AutoindentMode>,
2822 cx: &mut Context<Self>,
2823 ) {
2824 let edits = self.edits_for_marked_text(marked_string);
2825 self.edit(edits, autoindent_mode, cx);
2826 }
2827
2828 pub fn set_group_interval(&mut self, group_interval: Duration) {
2829 self.text.set_group_interval(group_interval);
2830 }
2831
2832 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2833 where
2834 T: rand::Rng,
2835 {
2836 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2837 let mut last_end = None;
2838 for _ in 0..old_range_count {
2839 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2840 break;
2841 }
2842
2843 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2844 let mut range = self.random_byte_range(new_start, rng);
2845 if rng.gen_bool(0.2) {
2846 mem::swap(&mut range.start, &mut range.end);
2847 }
2848 last_end = Some(range.end);
2849
2850 let new_text_len = rng.gen_range(0..10);
2851 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2852 new_text = new_text.to_uppercase();
2853
2854 edits.push((range, new_text));
2855 }
2856 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2857 self.edit(edits, None, cx);
2858 }
2859
2860 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2861 let was_dirty = self.is_dirty();
2862 let old_version = self.version.clone();
2863
2864 let ops = self.text.randomly_undo_redo(rng);
2865 if !ops.is_empty() {
2866 for op in ops {
2867 self.send_operation(Operation::Buffer(op), true, cx);
2868 self.did_edit(&old_version, was_dirty, cx);
2869 }
2870 }
2871 }
2872}
2873
2874impl EventEmitter<BufferEvent> for Buffer {}
2875
2876impl Deref for Buffer {
2877 type Target = TextBuffer;
2878
2879 fn deref(&self) -> &Self::Target {
2880 &self.text
2881 }
2882}
2883
2884impl BufferSnapshot {
2885 /// Returns [`IndentSize`] for a given line that respects user settings and
2886 /// language preferences.
2887 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2888 indent_size_for_line(self, row)
2889 }
2890
2891 /// Returns [`IndentSize`] for a given position that respects user settings
2892 /// and language preferences.
2893 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2894 let settings = language_settings(
2895 self.language_at(position).map(|l| l.name()),
2896 self.file(),
2897 cx,
2898 );
2899 if settings.hard_tabs {
2900 IndentSize::tab()
2901 } else {
2902 IndentSize::spaces(settings.tab_size.get())
2903 }
2904 }
2905
2906 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2907 /// is passed in as `single_indent_size`.
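///
/// A sketch, assuming a four-space indent unit; the row range is illustrative:
///
/// ```ignore
/// let indents = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```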
2908 pub fn suggested_indents(
2909 &self,
2910 rows: impl Iterator<Item = u32>,
2911 single_indent_size: IndentSize,
2912 ) -> BTreeMap<u32, IndentSize> {
2913 let mut result = BTreeMap::new();
2914
2915 for row_range in contiguous_ranges(rows, 10) {
2916 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2917 Some(suggestions) => suggestions,
2918 _ => break,
2919 };
2920
2921 for (row, suggestion) in row_range.zip(suggestions) {
2922 let indent_size = if let Some(suggestion) = suggestion {
2923 result
2924 .get(&suggestion.basis_row)
2925 .copied()
2926 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2927 .with_delta(suggestion.delta, single_indent_size)
2928 } else {
2929 self.indent_size_for_line(row)
2930 };
2931
2932 result.insert(row, indent_size);
2933 }
2934 }
2935
2936 result
2937 }
2938
2939 fn suggest_autoindents(
2940 &self,
2941 row_range: Range<u32>,
2942 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2943 let config = &self.language.as_ref()?.config;
2944 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2945
2946 #[derive(Debug, Clone)]
2947 struct StartPosition {
2948 start: Point,
2949 suffix: SharedString,
2950 }
2951
2952 // Find the suggested indentation ranges based on the syntax tree.
2953 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2954 let end = Point::new(row_range.end, 0);
2955 let range = (start..end).to_offset(&self.text);
2956 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2957 Some(&grammar.indents_config.as_ref()?.query)
2958 });
2959 let indent_configs = matches
2960 .grammars()
2961 .iter()
2962 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2963 .collect::<Vec<_>>();
2964
2965 let mut indent_ranges = Vec::<Range<Point>>::new();
2966 let mut start_positions = Vec::<StartPosition>::new();
2967 let mut outdent_positions = Vec::<Point>::new();
2968 while let Some(mat) = matches.peek() {
2969 let mut start: Option<Point> = None;
2970 let mut end: Option<Point> = None;
2971
2972 let config = indent_configs[mat.grammar_index];
2973 for capture in mat.captures {
2974 if capture.index == config.indent_capture_ix {
2975 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2976 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2977 } else if Some(capture.index) == config.start_capture_ix {
2978 start = Some(Point::from_ts_point(capture.node.end_position()));
2979 } else if Some(capture.index) == config.end_capture_ix {
2980 end = Some(Point::from_ts_point(capture.node.start_position()));
2981 } else if Some(capture.index) == config.outdent_capture_ix {
2982 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2983 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2984 start_positions.push(StartPosition {
2985 start: Point::from_ts_point(capture.node.start_position()),
2986 suffix: suffix.clone(),
2987 });
2988 }
2989 }
2990
2991 matches.advance();
2992 if let Some((start, end)) = start.zip(end) {
2993 if start.row == end.row {
2994 continue;
2995 }
2996 let range = start..end;
2997 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2998 Err(ix) => indent_ranges.insert(ix, range),
2999 Ok(ix) => {
3000 let prev_range = &mut indent_ranges[ix];
3001 prev_range.end = prev_range.end.max(range.end);
3002 }
3003 }
3004 }
3005 }
3006
3007 let mut error_ranges = Vec::<Range<Point>>::new();
3008 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3009 grammar.error_query.as_ref()
3010 });
3011 while let Some(mat) = matches.peek() {
3012 let node = mat.captures[0].node;
3013 let start = Point::from_ts_point(node.start_position());
3014 let end = Point::from_ts_point(node.end_position());
3015 let range = start..end;
3016 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3017 Ok(ix) | Err(ix) => ix,
3018 };
3019 let mut end_ix = ix;
3020 while let Some(existing_range) = error_ranges.get(end_ix) {
3021 if existing_range.end < end {
3022 end_ix += 1;
3023 } else {
3024 break;
3025 }
3026 }
3027 error_ranges.splice(ix..end_ix, [range]);
3028 matches.advance();
3029 }
3030
3031 outdent_positions.sort();
3032 for outdent_position in outdent_positions {
3033 // Find the innermost indent range containing this outdent position
3034 // and set its end to the outdent position.
3035 if let Some(range_to_truncate) = indent_ranges
3036 .iter_mut()
3037 .filter(|indent_range| indent_range.contains(&outdent_position))
3038 .next_back()
3039 {
3040 range_to_truncate.end = outdent_position;
3041 }
3042 }
3043
3044 start_positions.sort_by_key(|b| b.start);
3045
3046 // Find the suggested indentation increases and decreases based on regexes.
3047 let mut regex_outdent_map = HashMap::default();
3048 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3049 let mut start_positions_iter = start_positions.iter().peekable();
3050
3051 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3052 self.for_each_line(
3053 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3054 ..Point::new(row_range.end, 0),
3055 |row, line| {
3056 if config
3057 .decrease_indent_pattern
3058 .as_ref()
3059 .map_or(false, |regex| regex.is_match(line))
3060 {
3061 indent_change_rows.push((row, Ordering::Less));
3062 }
3063 if config
3064 .increase_indent_pattern
3065 .as_ref()
3066 .map_or(false, |regex| regex.is_match(line))
3067 {
3068 indent_change_rows.push((row + 1, Ordering::Greater));
3069 }
3070 while let Some(pos) = start_positions_iter.peek() {
3071 if pos.start.row < row {
3072 let pos = start_positions_iter.next().unwrap();
3073 last_seen_suffix
3074 .entry(pos.suffix.to_string())
3075 .or_default()
3076 .push(pos.start);
3077 } else {
3078 break;
3079 }
3080 }
3081 for rule in &config.decrease_indent_patterns {
3082 if rule.pattern.as_ref().map_or(false, |r| r.is_match(line)) {
3083 let row_start_column = self.indent_size_for_line(row).len;
3084 let basis_row = rule
3085 .valid_after
3086 .iter()
3087 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3088 .flatten()
3089 .filter(|start_point| start_point.column <= row_start_column)
3090 .max_by_key(|start_point| start_point.row);
3091 if let Some(outdent_to_row) = basis_row {
3092 regex_outdent_map.insert(row, outdent_to_row.row);
3093 }
3094 break;
3095 }
3096 }
3097 },
3098 );
3099
3100 let mut indent_changes = indent_change_rows.into_iter().peekable();
3101 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3102 prev_non_blank_row.unwrap_or(0)
3103 } else {
3104 row_range.start.saturating_sub(1)
3105 };
3106
3107 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3108 Some(row_range.map(move |row| {
3109 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3110
3111 let mut indent_from_prev_row = false;
3112 let mut outdent_from_prev_row = false;
3113 let mut outdent_to_row = u32::MAX;
3114 let mut from_regex = false;
3115
3116 while let Some((indent_row, delta)) = indent_changes.peek() {
3117 match indent_row.cmp(&row) {
3118 Ordering::Equal => match delta {
3119 Ordering::Less => {
3120 from_regex = true;
3121 outdent_from_prev_row = true
3122 }
3123 Ordering::Greater => {
3124 indent_from_prev_row = true;
3125 from_regex = true
3126 }
3127 _ => {}
3128 },
3129
3130 Ordering::Greater => break,
3131 Ordering::Less => {}
3132 }
3133
3134 indent_changes.next();
3135 }
3136
3137 for range in &indent_ranges {
3138 if range.start.row >= row {
3139 break;
3140 }
3141 if range.start.row == prev_row && range.end > row_start {
3142 indent_from_prev_row = true;
3143 }
3144 if range.end > prev_row_start && range.end <= row_start {
3145 outdent_to_row = outdent_to_row.min(range.start.row);
3146 }
3147 }
3148
3149 if let Some(basis_row) = regex_outdent_map.get(&row) {
3150 indent_from_prev_row = false;
3151 outdent_to_row = *basis_row;
3152 from_regex = true;
3153 }
3154
3155 let within_error = error_ranges
3156 .iter()
3157 .any(|e| e.start.row < row && e.end > row_start);
3158
3159 let suggestion = if outdent_to_row == prev_row
3160 || (outdent_from_prev_row && indent_from_prev_row)
3161 {
3162 Some(IndentSuggestion {
3163 basis_row: prev_row,
3164 delta: Ordering::Equal,
3165 within_error: within_error && !from_regex,
3166 })
3167 } else if indent_from_prev_row {
3168 Some(IndentSuggestion {
3169 basis_row: prev_row,
3170 delta: Ordering::Greater,
3171 within_error: within_error && !from_regex,
3172 })
3173 } else if outdent_to_row < prev_row {
3174 Some(IndentSuggestion {
3175 basis_row: outdent_to_row,
3176 delta: Ordering::Equal,
3177 within_error: within_error && !from_regex,
3178 })
3179 } else if outdent_from_prev_row {
3180 Some(IndentSuggestion {
3181 basis_row: prev_row,
3182 delta: Ordering::Less,
3183 within_error: within_error && !from_regex,
3184 })
3185 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3186 {
3187 Some(IndentSuggestion {
3188 basis_row: prev_row,
3189 delta: Ordering::Equal,
3190 within_error: within_error && !from_regex,
3191 })
3192 } else {
3193 None
3194 };
3195
3196 prev_row = row;
3197 prev_row_start = row_start;
3198 suggestion
3199 }))
3200 }
3201
3202 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3203 while row > 0 {
3204 row -= 1;
3205 if !self.is_line_blank(row) {
3206 return Some(row);
3207 }
3208 }
3209 None
3210 }
3211
3212 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3213 let captures = self.syntax.captures(range, &self.text, |grammar| {
3214 grammar.highlights_query.as_ref()
3215 });
3216 let highlight_maps = captures
3217 .grammars()
3218 .iter()
3219 .map(|grammar| grammar.highlight_map())
3220 .collect();
3221 (captures, highlight_maps)
3222 }
3223
3224 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3225 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3226 /// returned in chunks where each chunk has a single syntax highlighting style and
3227 /// diagnostic status.
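///
/// A sketch of reassembling the plain text for a range; the `text` field on
/// each chunk is assumed from this module's chunk type:
///
/// ```ignore
/// let text: String = snapshot
///     .chunks(0..snapshot.len(), true)
///     .map(|chunk| chunk.text)
///     .collect();
/// ```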
3228 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3229 let range = range.start.to_offset(self)..range.end.to_offset(self);
3230
3231 let mut syntax = None;
3232 if language_aware {
3233 syntax = Some(self.get_highlights(range.clone()));
3234 }
3235 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3236 let diagnostics = language_aware;
3237 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3238 }
3239
3240 pub fn highlighted_text_for_range<T: ToOffset>(
3241 &self,
3242 range: Range<T>,
3243 override_style: Option<HighlightStyle>,
3244 syntax_theme: &SyntaxTheme,
3245 ) -> HighlightedText {
3246 HighlightedText::from_buffer_range(
3247 range,
3248 &self.text,
3249 &self.syntax,
3250 override_style,
3251 syntax_theme,
3252 )
3253 }
3254
3255 /// Invokes the given callback for each line of text in the given range of the buffer.
3256 /// Uses a callback to avoid allocating a string for each line.
3257 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3258 let mut line = String::new();
3259 let mut row = range.start.row;
3260 for chunk in self
3261 .as_rope()
3262 .chunks_in_range(range.to_offset(self))
3263 .chain(["\n"])
3264 {
3265 for (newline_ix, text) in chunk.split('\n').enumerate() {
3266 if newline_ix > 0 {
3267 callback(row, &line);
3268 row += 1;
3269 line.clear();
3270 }
3271 line.push_str(text);
3272 }
3273 }
3274 }
3275
3276 /// Iterates over every [`SyntaxLayer`] in the buffer.
3277 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3278 self.syntax
3279 .layers_for_range(0..self.len(), &self.text, true)
3280 }
3281
3282 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3283 let offset = position.to_offset(self);
3284 self.syntax
3285 .layers_for_range(offset..offset, &self.text, false)
3286 .filter(|l| l.node().end_byte() > offset)
3287 .last()
3288 }
3289
3290 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3291 &self,
3292 range: Range<D>,
3293 ) -> Option<SyntaxLayer<'_>> {
3294 let range = range.to_offset(self);
3295 return self
3296 .syntax
3297 .layers_for_range(range, &self.text, false)
3298 .max_by(|a, b| {
3299 if a.depth != b.depth {
3300 a.depth.cmp(&b.depth)
3301 } else if a.offset.0 != b.offset.0 {
3302 a.offset.0.cmp(&b.offset.0)
3303 } else {
3304 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3305 }
3306 });
3307 }
3308
3309 /// Returns the main [`Language`].
3310 pub fn language(&self) -> Option<&Arc<Language>> {
3311 self.language.as_ref()
3312 }
3313
3314 /// Returns the [`Language`] at the given location.
3315 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3316 self.syntax_layer_at(position)
3317 .map(|info| info.language)
3318 .or(self.language.as_ref())
3319 }
3320
3321 /// Returns the settings for the language at the given location.
3322 pub fn settings_at<'a, D: ToOffset>(
3323 &'a self,
3324 position: D,
3325 cx: &'a App,
3326 ) -> Cow<'a, LanguageSettings> {
3327 language_settings(
3328 self.language_at(position).map(|l| l.name()),
3329 self.file.as_ref(),
3330 cx,
3331 )
3332 }
3333
3334 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3335 CharClassifier::new(self.language_scope_at(point))
3336 }
3337
3338 /// Returns the [`LanguageScope`] at the given location.
3339 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3340 let offset = position.to_offset(self);
3341 let mut scope = None;
3342 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3343
3344 // Use the layer that has the smallest node intersecting the given point.
3345 for layer in self
3346 .syntax
3347 .layers_for_range(offset..offset, &self.text, false)
3348 {
3349 let mut cursor = layer.node().walk();
3350
3351 let mut range = None;
3352 loop {
3353 let child_range = cursor.node().byte_range();
3354 if !child_range.contains(&offset) {
3355 break;
3356 }
3357
3358 range = Some(child_range);
3359 if cursor.goto_first_child_for_byte(offset).is_none() {
3360 break;
3361 }
3362 }
3363
3364 if let Some(range) = range
3365 && smallest_range_and_depth.as_ref().map_or(
3366 true,
3367 |(smallest_range, smallest_range_depth)| {
3368 if layer.depth > *smallest_range_depth {
3369 true
3370 } else if layer.depth == *smallest_range_depth {
3371 range.len() < smallest_range.len()
3372 } else {
3373 false
3374 }
3375 },
3376 ) {
3377 smallest_range_and_depth = Some((range, layer.depth));
3378 scope = Some(LanguageScope {
3379 language: layer.language.clone(),
3380 override_id: layer.override_id(offset, &self.text),
3381 });
3382 }
3383 }
3384
3385 scope.or_else(|| {
3386 self.language.clone().map(|language| LanguageScope {
3387 language,
3388 override_id: None,
3389 })
3390 })
3391 }
3392
3393 /// Returns a tuple of the range and character kind of the word
3394 /// surrounding the given position.
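///
/// Illustrative only: in a buffer containing `"hello world"`, querying an
/// offset inside the first word yields that word's byte range and kind:
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(2usize, false);
/// assert_eq!(range, 0..5); // "hello"
/// assert_eq!(kind, Some(CharKind::Word));
/// ```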
3395 pub fn surrounding_word<T: ToOffset>(
3396 &self,
3397 start: T,
3398 for_completion: bool,
3399 ) -> (Range<usize>, Option<CharKind>) {
3400 let mut start = start.to_offset(self);
3401 let mut end = start;
3402 let mut next_chars = self.chars_at(start).take(128).peekable();
3403 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3404
3405 let classifier = self
3406 .char_classifier_at(start)
3407 .for_completion(for_completion);
3408 let word_kind = cmp::max(
3409 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3410 next_chars.peek().copied().map(|c| classifier.kind(c)),
3411 );
3412
3413 for ch in prev_chars {
3414 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3415 start -= ch.len_utf8();
3416 } else {
3417 break;
3418 }
3419 }
3420
3421 for ch in next_chars {
3422 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3423 end += ch.len_utf8();
3424 } else {
3425 break;
3426 }
3427 }
3428
3429 (start..end, word_kind)
3430 }
3431
3432 /// Returns the closest syntax node enclosing the given range.
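///
/// A sketch of expanding a selection to its nearest enclosing syntax node;
/// `selection_range` is a hypothetical `Range<usize>`:
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
///     let expanded = node.byte_range();
///     // `expanded` strictly contains `selection_range`.
/// }
/// ```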
3433 pub fn syntax_ancestor<'a, T: ToOffset>(
3434 &'a self,
3435 range: Range<T>,
3436 ) -> Option<tree_sitter::Node<'a>> {
3437 let range = range.start.to_offset(self)..range.end.to_offset(self);
3438 let mut result: Option<tree_sitter::Node<'a>> = None;
3439 'outer: for layer in self
3440 .syntax
3441 .layers_for_range(range.clone(), &self.text, true)
3442 {
3443 let mut cursor = layer.node().walk();
3444
3445 // Descend to the first leaf that touches the start of the range.
3446 //
3447 // If the range is non-empty and the current node ends exactly at the start,
3448 // move to the next sibling to find a node that extends beyond the start.
3449 //
3450 // If the range is empty and the current node starts after the range position,
3451 // move to the previous sibling to find the node that contains the position.
3452 while cursor.goto_first_child_for_byte(range.start).is_some() {
3453 if !range.is_empty() && cursor.node().end_byte() == range.start {
3454 cursor.goto_next_sibling();
3455 }
3456 if range.is_empty() && cursor.node().start_byte() > range.start {
3457 cursor.goto_previous_sibling();
3458 }
3459 }
3460
3461 // Ascend to the smallest ancestor that strictly contains the range.
3462 loop {
3463 let node_range = cursor.node().byte_range();
3464 if node_range.start <= range.start
3465 && node_range.end >= range.end
3466 && node_range.len() > range.len()
3467 {
3468 break;
3469 }
3470 if !cursor.goto_parent() {
3471 continue 'outer;
3472 }
3473 }
3474
3475 let left_node = cursor.node();
3476 let mut layer_result = left_node;
3477
3478 // For an empty range, try to find another node immediately to the right of the range.
3479 if left_node.end_byte() == range.start {
3480 let mut right_node = None;
3481 while !cursor.goto_next_sibling() {
3482 if !cursor.goto_parent() {
3483 break;
3484 }
3485 }
3486
3487 while cursor.node().start_byte() == range.start {
3488 right_node = Some(cursor.node());
3489 if !cursor.goto_first_child() {
3490 break;
3491 }
3492 }
3493
3494 // If there is a candidate node on both sides of the (empty) range, then
3495 // decide between the two by favoring a named node over an anonymous token.
3496 // If both nodes are the same in that regard, favor the right one.
3497 if let Some(right_node) = right_node
3498 && (right_node.is_named() || !left_node.is_named()) {
3499 layer_result = right_node;
3500 }
3501 }
3502
3503 if let Some(previous_result) = &result
3504 && previous_result.byte_range().len() < layer_result.byte_range().len() {
3505 continue;
3506 }
3507 result = Some(layer_result);
3508 }
3509
3510 result
3511 }
3512
3513 /// Returns the root syntax node within the given row
3514 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3515 let start_offset = position.to_offset(self);
3516
3517 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3518
3519 let layer = self
3520 .syntax
3521 .layers_for_range(start_offset..start_offset, &self.text, true)
3522 .next()?;
3523
3524 let mut cursor = layer.node().walk();
3525
3526 // Descend to the first leaf that touches the start of the range.
3527 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3528 if cursor.node().end_byte() == start_offset {
3529 cursor.goto_next_sibling();
3530 }
3531 }
3532
3533 // Ascend to the root node within the same row.
3534 while cursor.goto_parent() {
3535 if cursor.node().start_position().row != row {
3536 break;
3537 }
3538 }
3539
        Some(cursor.node())
3541 }
3542
3543 /// Returns the outline for the buffer.
3544 ///
3545 /// This method allows passing an optional [`SyntaxTheme`] to
3546 /// syntax-highlight the returned symbols.
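    ///
    /// A minimal sketch (not compiled as a doctest); the helper is
    /// hypothetical, `snapshot` is assumed to be a parsed `BufferSnapshot`
    /// whose language has an outline query, and `Outline` is assumed to
    /// expose its `items`:
    ///
    /// ```ignore
    /// // Collect the display text of every outline item, without highlighting.
    /// fn outline_texts(snapshot: &BufferSnapshot) -> Vec<String> {
    ///     snapshot
    ///         .outline(None)
    ///         .map(|outline| outline.items.into_iter().map(|item| item.text).collect())
    ///         .unwrap_or_default()
    /// }
    /// ```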
3547 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3548 self.outline_items_containing(0..self.len(), true, theme)
3549 .map(Outline::new)
3550 }
3551
3552 /// Returns all the symbols that contain the given position.
3553 ///
3554 /// This method allows passing an optional [`SyntaxTheme`] to
3555 /// syntax-highlight the returned symbols.
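    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` and `offset` are assumed to come from the caller:
    ///
    /// ```ignore
    /// // Build a breadcrumb-style path of the symbols enclosing `offset`.
    /// fn symbol_path(snapshot: &BufferSnapshot, offset: usize) -> Option<String> {
    ///     let symbols = snapshot.symbols_containing(offset, None)?;
    ///     Some(
    ///         symbols
    ///             .iter()
    ///             .map(|item| item.text.as_str())
    ///             .collect::<Vec<_>>()
    ///             .join(" > "),
    ///     )
    /// }
    /// ```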
3556 pub fn symbols_containing<T: ToOffset>(
3557 &self,
3558 position: T,
3559 theme: Option<&SyntaxTheme>,
3560 ) -> Option<Vec<OutlineItem<Anchor>>> {
3561 let position = position.to_offset(self);
3562 let mut items = self.outline_items_containing(
3563 position.saturating_sub(1)..self.len().min(position + 1),
3564 false,
3565 theme,
3566 )?;
3567 let mut prev_depth = None;
3568 items.retain(|item| {
3569 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3570 prev_depth = Some(item.depth);
3571 result
3572 });
3573 Some(items)
3574 }
3575
3576 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3577 let range = range.to_offset(self);
3578 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3579 grammar.outline_config.as_ref().map(|c| &c.query)
3580 });
3581 let configs = matches
3582 .grammars()
3583 .iter()
3584 .map(|g| g.outline_config.as_ref().unwrap())
3585 .collect::<Vec<_>>();
3586
3587 while let Some(mat) = matches.peek() {
3588 let config = &configs[mat.grammar_index];
3589 let containing_item_node = maybe!({
3590 let item_node = mat.captures.iter().find_map(|cap| {
3591 if cap.index == config.item_capture_ix {
3592 Some(cap.node)
3593 } else {
3594 None
3595 }
3596 })?;
3597
3598 let item_byte_range = item_node.byte_range();
3599 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3600 None
3601 } else {
3602 Some(item_node)
3603 }
3604 });
3605
3606 if let Some(item_node) = containing_item_node {
3607 return Some(
3608 Point::from_ts_point(item_node.start_position())
3609 ..Point::from_ts_point(item_node.end_position()),
3610 );
3611 }
3612
3613 matches.advance();
3614 }
3615 None
3616 }
3617
3618 pub fn outline_items_containing<T: ToOffset>(
3619 &self,
3620 range: Range<T>,
3621 include_extra_context: bool,
3622 theme: Option<&SyntaxTheme>,
3623 ) -> Option<Vec<OutlineItem<Anchor>>> {
3624 let range = range.to_offset(self);
3625 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3626 grammar.outline_config.as_ref().map(|c| &c.query)
3627 });
3628 let configs = matches
3629 .grammars()
3630 .iter()
3631 .map(|g| g.outline_config.as_ref().unwrap())
3632 .collect::<Vec<_>>();
3633
3634 let mut items = Vec::new();
3635 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3636 while let Some(mat) = matches.peek() {
3637 let config = &configs[mat.grammar_index];
3638 if let Some(item) =
3639 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3640 {
3641 items.push(item);
3642 } else if let Some(capture) = mat
3643 .captures
3644 .iter()
3645 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3646 {
3647 let capture_range = capture.node.start_position()..capture.node.end_position();
3648 let mut capture_row_range =
3649 capture_range.start.row as u32..capture_range.end.row as u32;
3650 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3651 {
3652 capture_row_range.end -= 1;
3653 }
3654 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3655 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3656 last_row_range.end = capture_row_range.end;
3657 } else {
3658 annotation_row_ranges.push(capture_row_range);
3659 }
3660 } else {
3661 annotation_row_ranges.push(capture_row_range);
3662 }
3663 }
3664 matches.advance();
3665 }
3666
3667 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3668
3669 // Assign depths based on containment relationships and convert to anchors.
3670 let mut item_ends_stack = Vec::<Point>::new();
3671 let mut anchor_items = Vec::new();
3672 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3673 for item in items {
3674 while let Some(last_end) = item_ends_stack.last().copied() {
3675 if last_end < item.range.end {
3676 item_ends_stack.pop();
3677 } else {
3678 break;
3679 }
3680 }
3681
3682 let mut annotation_row_range = None;
3683 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3684 let row_preceding_item = item.range.start.row.saturating_sub(1);
3685 if next_annotation_row_range.end < row_preceding_item {
3686 annotation_row_ranges.next();
3687 } else {
3688 if next_annotation_row_range.end == row_preceding_item {
3689 annotation_row_range = Some(next_annotation_row_range.clone());
3690 annotation_row_ranges.next();
3691 }
3692 break;
3693 }
3694 }
3695
3696 anchor_items.push(OutlineItem {
3697 depth: item_ends_stack.len(),
3698 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3699 text: item.text,
3700 highlight_ranges: item.highlight_ranges,
3701 name_ranges: item.name_ranges,
3702 body_range: item.body_range.map(|body_range| {
3703 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3704 }),
3705 annotation_range: annotation_row_range.map(|annotation_range| {
3706 self.anchor_after(Point::new(annotation_range.start, 0))
3707 ..self.anchor_before(Point::new(
3708 annotation_range.end,
3709 self.line_len(annotation_range.end),
3710 ))
3711 }),
3712 });
3713 item_ends_stack.push(item.range.end);
3714 }
3715
3716 Some(anchor_items)
3717 }
3718
3719 fn next_outline_item(
3720 &self,
3721 config: &OutlineConfig,
3722 mat: &SyntaxMapMatch,
3723 range: &Range<usize>,
3724 include_extra_context: bool,
3725 theme: Option<&SyntaxTheme>,
3726 ) -> Option<OutlineItem<Point>> {
3727 let item_node = mat.captures.iter().find_map(|cap| {
3728 if cap.index == config.item_capture_ix {
3729 Some(cap.node)
3730 } else {
3731 None
3732 }
3733 })?;
3734
3735 let item_byte_range = item_node.byte_range();
3736 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3737 return None;
3738 }
3739 let item_point_range = Point::from_ts_point(item_node.start_position())
3740 ..Point::from_ts_point(item_node.end_position());
3741
3742 let mut open_point = None;
3743 let mut close_point = None;
3744 let mut buffer_ranges = Vec::new();
3745 for capture in mat.captures {
3746 let node_is_name;
3747 if capture.index == config.name_capture_ix {
3748 node_is_name = true;
3749 } else if Some(capture.index) == config.context_capture_ix
3750 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3751 {
3752 node_is_name = false;
3753 } else {
3754 if Some(capture.index) == config.open_capture_ix {
3755 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3756 } else if Some(capture.index) == config.close_capture_ix {
3757 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3758 }
3759
3760 continue;
3761 }
3762
3763 let mut range = capture.node.start_byte()..capture.node.end_byte();
3764 let start = capture.node.start_position();
3765 if capture.node.end_position().row > start.row {
3766 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3767 }
3768
3769 if !range.is_empty() {
3770 buffer_ranges.push((range, node_is_name));
3771 }
3772 }
3773 if buffer_ranges.is_empty() {
3774 return None;
3775 }
3776 let mut text = String::new();
3777 let mut highlight_ranges = Vec::new();
3778 let mut name_ranges = Vec::new();
3779 let mut chunks = self.chunks(
3780 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3781 true,
3782 );
3783 let mut last_buffer_range_end = 0;
3784
3785 for (buffer_range, is_name) in buffer_ranges {
3786 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3787 if space_added {
3788 text.push(' ');
3789 }
3790 let before_append_len = text.len();
3791 let mut offset = buffer_range.start;
3792 chunks.seek(buffer_range.clone());
3793 for mut chunk in chunks.by_ref() {
3794 if chunk.text.len() > buffer_range.end - offset {
3795 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3796 offset = buffer_range.end;
3797 } else {
3798 offset += chunk.text.len();
3799 }
3800 let style = chunk
3801 .syntax_highlight_id
3802 .zip(theme)
3803 .and_then(|(highlight, theme)| highlight.style(theme));
3804 if let Some(style) = style {
3805 let start = text.len();
3806 let end = start + chunk.text.len();
3807 highlight_ranges.push((start..end, style));
3808 }
3809 text.push_str(chunk.text);
3810 if offset >= buffer_range.end {
3811 break;
3812 }
3813 }
3814 if is_name {
3815 let after_append_len = text.len();
3816 let start = if space_added && !name_ranges.is_empty() {
3817 before_append_len - 1
3818 } else {
3819 before_append_len
3820 };
3821 name_ranges.push(start..after_append_len);
3822 }
3823 last_buffer_range_end = buffer_range.end;
3824 }
3825
3826 Some(OutlineItem {
3827 depth: 0, // We'll calculate the depth later
3828 range: item_point_range,
3829 text,
3830 highlight_ranges,
3831 name_ranges,
3832 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3833 annotation_range: None,
3834 })
3835 }
3836
3837 pub fn function_body_fold_ranges<T: ToOffset>(
3838 &self,
3839 within: Range<T>,
3840 ) -> impl Iterator<Item = Range<usize>> + '_ {
3841 self.text_object_ranges(within, TreeSitterOptions::default())
3842 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3843 }
3844
    /// For each grammar active in the given range, runs the
    /// [`tree_sitter::Query`] selected by the provided callback.
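    ///
    /// A sketch of the typical peek/advance loop (not compiled as a doctest);
    /// the helper is hypothetical and `snapshot` is assumed to be a parsed
    /// `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Count how many outline query matches occur in the whole buffer.
    /// fn count_outline_matches(snapshot: &BufferSnapshot) -> usize {
    ///     let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///         grammar.outline_config.as_ref().map(|config| &config.query)
    ///     });
    ///     let mut count = 0;
    ///     while matches.peek().is_some() {
    ///         count += 1;
    ///         matches.advance();
    ///     }
    ///     count
    /// }
    /// ```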
3847 pub fn matches(
3848 &self,
3849 range: Range<usize>,
3850 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3851 ) -> SyntaxMapMatches<'_> {
3852 self.syntax.matches(range, self, query)
3853 }
3854
3855 pub fn all_bracket_ranges(
3856 &self,
3857 range: Range<usize>,
3858 ) -> impl Iterator<Item = BracketMatch> + '_ {
3859 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3860 grammar.brackets_config.as_ref().map(|c| &c.query)
3861 });
3862 let configs = matches
3863 .grammars()
3864 .iter()
3865 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3866 .collect::<Vec<_>>();
3867
3868 iter::from_fn(move || {
3869 while let Some(mat) = matches.peek() {
3870 let mut open = None;
3871 let mut close = None;
3872 let config = &configs[mat.grammar_index];
3873 let pattern = &config.patterns[mat.pattern_index];
3874 for capture in mat.captures {
3875 if capture.index == config.open_capture_ix {
3876 open = Some(capture.node.byte_range());
3877 } else if capture.index == config.close_capture_ix {
3878 close = Some(capture.node.byte_range());
3879 }
3880 }
3881
3882 matches.advance();
3883
3884 let Some((open_range, close_range)) = open.zip(close) else {
3885 continue;
3886 };
3887
3888 let bracket_range = open_range.start..=close_range.end;
3889 if !bracket_range.overlaps(&range) {
3890 continue;
3891 }
3892
3893 return Some(BracketMatch {
3894 open_range,
3895 close_range,
3896 newline_only: pattern.newline_only,
3897 });
3898 }
3899 None
3900 })
3901 }
3902
    /// Returns the bracket range pairs that overlap or are adjacent to `range`.
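    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` is assumed to be a parsed `BufferSnapshot` whose
    /// language has a brackets query:
    ///
    /// ```ignore
    /// // Find the widest bracket pair touching the given range.
    /// fn widest_pair(snapshot: &BufferSnapshot, range: Range<usize>) -> Option<BracketMatch> {
    ///     snapshot
    ///         .bracket_ranges(range)
    ///         .max_by_key(|pair| pair.close_range.end.saturating_sub(pair.open_range.start))
    /// }
    /// ```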
3904 pub fn bracket_ranges<T: ToOffset>(
3905 &self,
3906 range: Range<T>,
3907 ) -> impl Iterator<Item = BracketMatch> + '_ {
3908 // Find bracket pairs that *inclusively* contain the given range.
3909 let range = range.start.to_offset(self).saturating_sub(1)
3910 ..self.len().min(range.end.to_offset(self) + 1);
3911 self.all_bracket_ranges(range)
3912 .filter(|pair| !pair.newline_only)
3913 }
3914
3915 pub fn debug_variables_query<T: ToOffset>(
3916 &self,
3917 range: Range<T>,
3918 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3919 let range = range.start.to_offset(self).saturating_sub(1)
3920 ..self.len().min(range.end.to_offset(self) + 1);
3921
3922 let mut matches = self.syntax.matches_with_options(
3923 range.clone(),
3924 &self.text,
3925 TreeSitterOptions::default(),
3926 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3927 );
3928
3929 let configs = matches
3930 .grammars()
3931 .iter()
3932 .map(|grammar| grammar.debug_variables_config.as_ref())
3933 .collect::<Vec<_>>();
3934
3935 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3936
3937 iter::from_fn(move || {
3938 loop {
3939 while let Some(capture) = captures.pop() {
3940 if capture.0.overlaps(&range) {
3941 return Some(capture);
3942 }
3943 }
3944
3945 let mat = matches.peek()?;
3946
3947 let Some(config) = configs[mat.grammar_index].as_ref() else {
3948 matches.advance();
3949 continue;
3950 };
3951
3952 for capture in mat.captures {
3953 let Some(ix) = config
3954 .objects_by_capture_ix
3955 .binary_search_by_key(&capture.index, |e| e.0)
3956 .ok()
3957 else {
3958 continue;
3959 };
3960 let text_object = config.objects_by_capture_ix[ix].1;
3961 let byte_range = capture.node.byte_range();
3962
3963 let mut found = false;
3964 for (range, existing) in captures.iter_mut() {
3965 if existing == &text_object {
3966 range.start = range.start.min(byte_range.start);
3967 range.end = range.end.max(byte_range.end);
3968 found = true;
3969 break;
3970 }
3971 }
3972
3973 if !found {
3974 captures.push((byte_range, text_object));
3975 }
3976 }
3977
3978 matches.advance();
3979 }
3980 })
3981 }
3982
3983 pub fn text_object_ranges<T: ToOffset>(
3984 &self,
3985 range: Range<T>,
3986 options: TreeSitterOptions,
3987 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3988 let range = range.start.to_offset(self).saturating_sub(1)
3989 ..self.len().min(range.end.to_offset(self) + 1);
3990
3991 let mut matches =
3992 self.syntax
3993 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3994 grammar.text_object_config.as_ref().map(|c| &c.query)
3995 });
3996
3997 let configs = matches
3998 .grammars()
3999 .iter()
4000 .map(|grammar| grammar.text_object_config.as_ref())
4001 .collect::<Vec<_>>();
4002
4003 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4004
4005 iter::from_fn(move || {
4006 loop {
4007 while let Some(capture) = captures.pop() {
4008 if capture.0.overlaps(&range) {
4009 return Some(capture);
4010 }
4011 }
4012
4013 let mat = matches.peek()?;
4014
4015 let Some(config) = configs[mat.grammar_index].as_ref() else {
4016 matches.advance();
4017 continue;
4018 };
4019
4020 for capture in mat.captures {
4021 let Some(ix) = config
4022 .text_objects_by_capture_ix
4023 .binary_search_by_key(&capture.index, |e| e.0)
4024 .ok()
4025 else {
4026 continue;
4027 };
4028 let text_object = config.text_objects_by_capture_ix[ix].1;
4029 let byte_range = capture.node.byte_range();
4030
4031 let mut found = false;
4032 for (range, existing) in captures.iter_mut() {
4033 if existing == &text_object {
4034 range.start = range.start.min(byte_range.start);
4035 range.end = range.end.max(byte_range.end);
4036 found = true;
4037 break;
4038 }
4039 }
4040
4041 if !found {
4042 captures.push((byte_range, text_object));
4043 }
4044 }
4045
4046 matches.advance();
4047 }
4048 })
4049 }
4050
    /// Returns the enclosing bracket ranges that contain the given range.
4052 pub fn enclosing_bracket_ranges<T: ToOffset>(
4053 &self,
4054 range: Range<T>,
4055 ) -> impl Iterator<Item = BracketMatch> + '_ {
4056 let range = range.start.to_offset(self)..range.end.to_offset(self);
4057
4058 self.bracket_ranges(range.clone()).filter(move |pair| {
4059 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4060 })
4061 }
4062
    /// Returns the smallest pair of bracket ranges enclosing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be provided to restrict which bracket
    /// pairs are considered.
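    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` is assumed to be a parsed `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Find the innermost brackets around the cursor, keeping only pairs
    /// // that have content between the open and close brackets.
    /// fn innermost_nonempty_brackets(
    ///     snapshot: &BufferSnapshot,
    ///     offset: usize,
    /// ) -> Option<(Range<usize>, Range<usize>)> {
    ///     snapshot.innermost_enclosing_bracket_ranges(
    ///         offset..offset,
    ///         Some(&|open: Range<usize>, close: Range<usize>| close.start > open.end),
    ///     )
    /// }
    /// ```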
4066 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4067 &self,
4068 range: Range<T>,
4069 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4070 ) -> Option<(Range<usize>, Range<usize>)> {
4071 let range = range.start.to_offset(self)..range.end.to_offset(self);
4072
4073 // Get the ranges of the innermost pair of brackets.
4074 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4075
4076 for pair in self.enclosing_bracket_ranges(range.clone()) {
4077 if let Some(range_filter) = range_filter
4078 && !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
4079 continue;
4080 }
4081
4082 let len = pair.close_range.end - pair.open_range.start;
4083
4084 if let Some((existing_open, existing_close)) = &result {
4085 let existing_len = existing_close.end - existing_open.start;
4086 if len > existing_len {
4087 continue;
4088 }
4089 }
4090
4091 result = Some((pair.open_range, pair.close_range));
4092 }
4093
4094 result
4095 }
4096
    /// Returns the offset ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction
    /// query associated with each will be run on the relevant section of the buffer.
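    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` is assumed to use a language with a redactions query
    /// (for example, an env-file language):
    ///
    /// ```ignore
    /// // Pair every redacted span with a placeholder to render in its place.
    /// fn redactions(snapshot: &BufferSnapshot) -> Vec<(Range<usize>, &'static str)> {
    ///     snapshot
    ///         .redacted_ranges(0..snapshot.len())
    ///         .map(|range| (range, "[REDACTED]"))
    ///         .collect()
    /// }
    /// ```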
4100 pub fn redacted_ranges<T: ToOffset>(
4101 &self,
4102 range: Range<T>,
4103 ) -> impl Iterator<Item = Range<usize>> + '_ {
4104 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4105 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4106 grammar
4107 .redactions_config
4108 .as_ref()
4109 .map(|config| &config.query)
4110 });
4111
4112 let configs = syntax_matches
4113 .grammars()
4114 .iter()
4115 .map(|grammar| grammar.redactions_config.as_ref())
4116 .collect::<Vec<_>>();
4117
4118 iter::from_fn(move || {
4119 let redacted_range = syntax_matches
4120 .peek()
4121 .and_then(|mat| {
4122 configs[mat.grammar_index].and_then(|config| {
4123 mat.captures
4124 .iter()
4125 .find(|capture| capture.index == config.redaction_capture_ix)
4126 })
4127 })
4128 .map(|mat| mat.node.byte_range());
4129 syntax_matches.advance();
4130 redacted_range
4131 })
4132 }
4133
4134 pub fn injections_intersecting_range<T: ToOffset>(
4135 &self,
4136 range: Range<T>,
4137 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4138 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4139
4140 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4141 grammar
4142 .injection_config
4143 .as_ref()
4144 .map(|config| &config.query)
4145 });
4146
4147 let configs = syntax_matches
4148 .grammars()
4149 .iter()
4150 .map(|grammar| grammar.injection_config.as_ref())
4151 .collect::<Vec<_>>();
4152
4153 iter::from_fn(move || {
4154 let ranges = syntax_matches.peek().and_then(|mat| {
4155 let config = &configs[mat.grammar_index]?;
4156 let content_capture_range = mat.captures.iter().find_map(|capture| {
4157 if capture.index == config.content_capture_ix {
4158 Some(capture.node.byte_range())
4159 } else {
4160 None
4161 }
4162 })?;
4163 let language = self.language_at(content_capture_range.start)?;
4164 Some((content_capture_range, language))
4165 });
4166 syntax_matches.advance();
4167 ranges
4168 })
4169 }
4170
4171 pub fn runnable_ranges(
4172 &self,
4173 offset_range: Range<usize>,
4174 ) -> impl Iterator<Item = RunnableRange> + '_ {
4175 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4176 grammar.runnable_config.as_ref().map(|config| &config.query)
4177 });
4178
4179 let test_configs = syntax_matches
4180 .grammars()
4181 .iter()
4182 .map(|grammar| grammar.runnable_config.as_ref())
4183 .collect::<Vec<_>>();
4184
4185 iter::from_fn(move || {
4186 loop {
4187 let mat = syntax_matches.peek()?;
4188
4189 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4190 let mut run_range = None;
4191 let full_range = mat.captures.iter().fold(
4192 Range {
4193 start: usize::MAX,
4194 end: 0,
4195 },
4196 |mut acc, next| {
4197 let byte_range = next.node.byte_range();
4198 if acc.start > byte_range.start {
4199 acc.start = byte_range.start;
4200 }
4201 if acc.end < byte_range.end {
4202 acc.end = byte_range.end;
4203 }
4204 acc
4205 },
4206 );
4207 if full_range.start > full_range.end {
4208 // We did not find a full spanning range of this match.
4209 return None;
4210 }
4211 let extra_captures: SmallVec<[_; 1]> =
4212 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4213 test_configs
4214 .extra_captures
4215 .get(capture.index as usize)
4216 .cloned()
4217 .and_then(|tag_name| match tag_name {
4218 RunnableCapture::Named(name) => {
4219 Some((capture.node.byte_range(), name))
4220 }
4221 RunnableCapture::Run => {
4222 let _ = run_range.insert(capture.node.byte_range());
4223 None
4224 }
4225 })
4226 }));
4227 let run_range = run_range?;
4228 let tags = test_configs
4229 .query
4230 .property_settings(mat.pattern_index)
4231 .iter()
4232 .filter_map(|property| {
4233 if *property.key == *"tag" {
4234 property
4235 .value
4236 .as_ref()
4237 .map(|value| RunnableTag(value.to_string().into()))
4238 } else {
4239 None
4240 }
4241 })
4242 .collect();
4243 let extra_captures = extra_captures
4244 .into_iter()
4245 .map(|(range, name)| {
4246 (
4247 name.to_string(),
4248 self.text_for_range(range.clone()).collect::<String>(),
4249 )
4250 })
4251 .collect();
4252 // All tags should have the same range.
4253 Some(RunnableRange {
4254 run_range,
4255 full_range,
4256 runnable: Runnable {
4257 tags,
4258 language: mat.language,
4259 buffer: self.remote_id(),
4260 },
4261 extra_captures,
4262 buffer_id: self.remote_id(),
4263 })
4264 });
4265
4266 syntax_matches.advance();
4267 if test_range.is_some() {
                    // Note: it's fine that `.peek()?` above ends this iterator once there are no
                    // more matches. We only return early here when a run marker was captured;
                    // a match without one simply loops around to try the next match.
4270 return test_range;
4271 }
4272 }
4273 })
4274 }
4275
4276 /// Returns selections for remote peers intersecting the given range.
4277 #[allow(clippy::type_complexity)]
4278 pub fn selections_in_range(
4279 &self,
4280 range: Range<Anchor>,
4281 include_local: bool,
4282 ) -> impl Iterator<
4283 Item = (
4284 ReplicaId,
4285 bool,
4286 CursorShape,
4287 impl Iterator<Item = &Selection<Anchor>> + '_,
4288 ),
4289 > + '_ {
4290 self.remote_selections
4291 .iter()
4292 .filter(move |(replica_id, set)| {
4293 (include_local || **replica_id != self.text.replica_id())
4294 && !set.selections.is_empty()
4295 })
4296 .map(move |(replica_id, set)| {
4297 let start_ix = match set.selections.binary_search_by(|probe| {
4298 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4299 }) {
4300 Ok(ix) | Err(ix) => ix,
4301 };
4302 let end_ix = match set.selections.binary_search_by(|probe| {
4303 probe.start.cmp(&range.end, self).then(Ordering::Less)
4304 }) {
4305 Ok(ix) | Err(ix) => ix,
4306 };
4307
4308 (
4309 *replica_id,
4310 set.line_mode,
4311 set.cursor_shape,
4312 set.selections[start_ix..end_ix].iter(),
4313 )
4314 })
4315 }
4316
    /// Returns whether the buffer contains any diagnostics.
4318 pub fn has_diagnostics(&self) -> bool {
4319 !self.diagnostics.is_empty()
4320 }
4321
4322 /// Returns all the diagnostics intersecting the given range.
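    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` is assumed to carry previously-set diagnostics:
    ///
    /// ```ignore
    /// // Collect the message of every error in the given offset range.
    /// fn error_messages(snapshot: &BufferSnapshot, range: Range<usize>) -> Vec<String> {
    ///     snapshot
    ///         .diagnostics_in_range::<_, usize>(range, false)
    ///         .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///         .map(|entry| entry.diagnostic.message.clone())
    ///         .collect()
    /// }
    /// ```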
4323 pub fn diagnostics_in_range<'a, T, O>(
4324 &'a self,
4325 search_range: Range<T>,
4326 reversed: bool,
4327 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4328 where
4329 T: 'a + Clone + ToOffset,
4330 O: 'a + FromAnchor,
4331 {
4332 let mut iterators: Vec<_> = self
4333 .diagnostics
4334 .iter()
4335 .map(|(_, collection)| {
4336 collection
4337 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4338 .peekable()
4339 })
4340 .collect();
4341
4342 std::iter::from_fn(move || {
4343 let (next_ix, _) = iterators
4344 .iter_mut()
4345 .enumerate()
4346 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4347 .min_by(|(_, a), (_, b)| {
4348 let cmp = a
4349 .range
4350 .start
4351 .cmp(&b.range.start, self)
4352 // when range is equal, sort by diagnostic severity
4353 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4354 // and stabilize order with group_id
4355 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4356 if reversed { cmp.reverse() } else { cmp }
4357 })?;
4358 iterators[next_ix]
4359 .next()
4360 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4361 diagnostic,
4362 range: FromAnchor::from_anchor(&range.start, self)
4363 ..FromAnchor::from_anchor(&range.end, self),
4364 })
4365 })
4366 }
4367
4368 /// Returns all the diagnostic groups associated with the given
4369 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
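    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` is assumed to carry diagnostics from at least one
    /// language server:
    ///
    /// ```ignore
    /// // Count the diagnostic groups reported by each language server.
    /// fn group_counts(snapshot: &BufferSnapshot) -> HashMap<LanguageServerId, usize> {
    ///     let mut counts = HashMap::default();
    ///     for (server_id, _group) in snapshot.diagnostic_groups(None) {
    ///         *counts.entry(server_id).or_insert(0) += 1;
    ///     }
    ///     counts
    /// }
    /// ```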
4371 pub fn diagnostic_groups(
4372 &self,
4373 language_server_id: Option<LanguageServerId>,
4374 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4375 let mut groups = Vec::new();
4376
4377 if let Some(language_server_id) = language_server_id {
4378 if let Ok(ix) = self
4379 .diagnostics
4380 .binary_search_by_key(&language_server_id, |e| e.0)
4381 {
4382 self.diagnostics[ix]
4383 .1
4384 .groups(language_server_id, &mut groups, self);
4385 }
4386 } else {
4387 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4388 diagnostics.groups(*language_server_id, &mut groups, self);
4389 }
4390 }
4391
4392 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4393 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4394 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4395 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4396 });
4397
4398 groups
4399 }
4400
4401 /// Returns an iterator over the diagnostics for the given group.
4402 pub fn diagnostic_group<O>(
4403 &self,
4404 group_id: usize,
4405 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4406 where
4407 O: FromAnchor + 'static,
4408 {
4409 self.diagnostics
4410 .iter()
4411 .flat_map(move |(_, set)| set.group(group_id, self))
4412 }
4413
4414 /// An integer version number that accounts for all updates besides
4415 /// the buffer's text itself (which is versioned via a version vector).
4416 pub fn non_text_state_update_count(&self) -> usize {
4417 self.non_text_state_update_count
4418 }
4419
4420 /// An integer version that changes when the buffer's syntax changes.
4421 pub fn syntax_update_count(&self) -> usize {
4422 self.syntax.update_count()
4423 }
4424
    /// Returns a snapshot of the underlying file.
4426 pub fn file(&self) -> Option<&Arc<dyn File>> {
4427 self.file.as_ref()
4428 }
4429
4430 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4431 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4432 if let Some(file) = self.file() {
4433 if file.path().file_name().is_none() || include_root {
4434 Some(file.full_path(cx))
4435 } else {
4436 Some(file.path().to_path_buf())
4437 }
4438 } else {
4439 None
4440 }
4441 }
4442
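    /// Returns the distinct words in the given offset range, keyed by their
    /// text, optionally filtered by a fuzzy query (see [`WordsQuery`]).
    ///
    /// A usage sketch (not compiled as a doctest); the helper is hypothetical
    /// and `snapshot` can be any `BufferSnapshot`:
    ///
    /// ```ignore
    /// // Gather completion candidates that fuzzily match "cfg".
    /// fn candidates(snapshot: &BufferSnapshot) -> Vec<String> {
    ///     snapshot
    ///         .words_in_range(WordsQuery {
    ///             fuzzy_contents: Some("cfg"),
    ///             skip_digits: true,
    ///             range: 0..snapshot.len(),
    ///         })
    ///         .into_keys()
    ///         .collect()
    /// }
    /// ```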
4443 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4444 let query_str = query.fuzzy_contents;
4445 if query_str.map_or(false, |query| query.is_empty()) {
4446 return BTreeMap::default();
4447 }
4448
4449 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4450 language,
4451 override_id: None,
4452 }));
4453
4454 let mut query_ix = 0;
4455 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4456 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4457
4458 let mut words = BTreeMap::default();
4459 let mut current_word_start_ix = None;
4460 let mut chunk_ix = query.range.start;
4461 for chunk in self.chunks(query.range, false) {
4462 for (i, c) in chunk.text.char_indices() {
4463 let ix = chunk_ix + i;
4464 if classifier.is_word(c) {
4465 if current_word_start_ix.is_none() {
4466 current_word_start_ix = Some(ix);
4467 }
4468
4469 if let Some(query_chars) = &query_chars
4470 && query_ix < query_len
4471 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4472 query_ix += 1;
4473 }
4474 continue;
4475 } else if let Some(word_start) = current_word_start_ix.take()
4476 && query_ix == query_len {
4477 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4478 let mut word_text = self.text_for_range(word_start..ix).peekable();
4479 let first_char = word_text
4480 .peek()
4481 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words
                    // and "words" that start with a digit.
4483 if !query.skip_digits
4484 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4485 {
4486 words.insert(word_text.collect(), word_range);
4487 }
4488 }
4489 query_ix = 0;
4490 }
4491 chunk_ix += chunk.text.len();
4492 }
4493
4494 words
4495 }
4496}
4497
4498pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this string, in order
    /// (a case-insensitive subsequence match).
4500 pub fuzzy_contents: Option<&'a str>,
4501 /// Skips words that start with a digit.
4502 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4504 pub range: Range<usize>,
4505}
4506
4507fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4508 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4509}
4510
4511fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4512 let mut result = IndentSize::spaces(0);
4513 for c in text {
4514 let kind = match c {
4515 ' ' => IndentKind::Space,
4516 '\t' => IndentKind::Tab,
4517 _ => break,
4518 };
4519 if result.len == 0 {
4520 result.kind = kind;
4521 }
4522 result.len += 1;
4523 }
4524 result
4525}
4526
4527impl Clone for BufferSnapshot {
4528 fn clone(&self) -> Self {
4529 Self {
4530 text: self.text.clone(),
4531 syntax: self.syntax.clone(),
4532 file: self.file.clone(),
4533 remote_selections: self.remote_selections.clone(),
4534 diagnostics: self.diagnostics.clone(),
4535 language: self.language.clone(),
4536 non_text_state_update_count: self.non_text_state_update_count,
4537 }
4538 }
4539}
4540
4541impl Deref for BufferSnapshot {
4542 type Target = text::BufferSnapshot;
4543
4544 fn deref(&self) -> &Self::Target {
4545 &self.text
4546 }
4547}
4548
4549unsafe impl Send for BufferChunks<'_> {}
4550
4551impl<'a> BufferChunks<'a> {
4552 pub(crate) fn new(
4553 text: &'a Rope,
4554 range: Range<usize>,
4555 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4556 diagnostics: bool,
4557 buffer_snapshot: Option<&'a BufferSnapshot>,
4558 ) -> Self {
4559 let mut highlights = None;
4560 if let Some((captures, highlight_maps)) = syntax {
4561 highlights = Some(BufferChunkHighlights {
4562 captures,
4563 next_capture: None,
4564 stack: Default::default(),
4565 highlight_maps,
4566 })
4567 }
4568
4569 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4570 let chunks = text.chunks_in_range(range.clone());
4571
4572 let mut this = BufferChunks {
4573 range,
4574 buffer_snapshot,
4575 chunks,
4576 diagnostic_endpoints,
4577 error_depth: 0,
4578 warning_depth: 0,
4579 information_depth: 0,
4580 hint_depth: 0,
4581 unnecessary_depth: 0,
4582 underline: true,
4583 highlights,
4584 };
4585 this.initialize_diagnostic_endpoints();
4586 this
4587 }
4588
    /// Seeks to the given byte range in the buffer.
4590 pub fn seek(&mut self, range: Range<usize>) {
4591 let old_range = std::mem::replace(&mut self.range, range.clone());
4592 self.chunks.set_range(self.range.clone());
4593 if let Some(highlights) = self.highlights.as_mut() {
4594 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4595 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4596 highlights
4597 .stack
4598 .retain(|(end_offset, _)| *end_offset > range.start);
4599 if let Some(capture) = &highlights.next_capture
4600 && range.start >= capture.node.start_byte() {
4601 let next_capture_end = capture.node.end_byte();
4602 if range.start < next_capture_end {
4603 highlights.stack.push((
4604 next_capture_end,
4605 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4606 ));
4607 }
4608 highlights.next_capture.take();
4609 }
4610 } else if let Some(snapshot) = self.buffer_snapshot {
4611 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4612 *highlights = BufferChunkHighlights {
4613 captures,
4614 next_capture: None,
4615 stack: Default::default(),
4616 highlight_maps,
4617 };
4618 } else {
4619 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4620 // Seeking such BufferChunks is not supported.
4621 debug_assert!(
4622 false,
4623 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4624 );
4625 }
4626
4627 highlights.captures.set_byte_range(self.range.clone());
4628 self.initialize_diagnostic_endpoints();
4629 }
4630 }
4631
4632 fn initialize_diagnostic_endpoints(&mut self) {
4633 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4634 && let Some(buffer) = self.buffer_snapshot {
4635 let mut diagnostic_endpoints = Vec::new();
4636 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4637 diagnostic_endpoints.push(DiagnosticEndpoint {
4638 offset: entry.range.start,
4639 is_start: true,
4640 severity: entry.diagnostic.severity,
4641 is_unnecessary: entry.diagnostic.is_unnecessary,
4642 underline: entry.diagnostic.underline,
4643 });
4644 diagnostic_endpoints.push(DiagnosticEndpoint {
4645 offset: entry.range.end,
4646 is_start: false,
4647 severity: entry.diagnostic.severity,
4648 is_unnecessary: entry.diagnostic.is_unnecessary,
4649 underline: entry.diagnostic.underline,
4650 });
4651 }
4652 diagnostic_endpoints
4653 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4654 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4655 self.hint_depth = 0;
4656 self.error_depth = 0;
4657 self.warning_depth = 0;
4658 self.information_depth = 0;
4659 }
4660 }
4661
4662 /// The current byte offset in the buffer.
4663 pub fn offset(&self) -> usize {
4664 self.range.start
4665 }
4666
4667 pub fn range(&self) -> Range<usize> {
4668 self.range.clone()
4669 }
4670
4671 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4672 let depth = match endpoint.severity {
4673 DiagnosticSeverity::ERROR => &mut self.error_depth,
4674 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4675 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4676 DiagnosticSeverity::HINT => &mut self.hint_depth,
4677 _ => return,
4678 };
4679 if endpoint.is_start {
4680 *depth += 1;
4681 } else {
4682 *depth -= 1;
4683 }
4684
4685 if endpoint.is_unnecessary {
4686 if endpoint.is_start {
4687 self.unnecessary_depth += 1;
4688 } else {
4689 self.unnecessary_depth -= 1;
4690 }
4691 }
4692 }
4693
4694 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4695 if self.error_depth > 0 {
4696 Some(DiagnosticSeverity::ERROR)
4697 } else if self.warning_depth > 0 {
4698 Some(DiagnosticSeverity::WARNING)
4699 } else if self.information_depth > 0 {
4700 Some(DiagnosticSeverity::INFORMATION)
4701 } else if self.hint_depth > 0 {
4702 Some(DiagnosticSeverity::HINT)
4703 } else {
4704 None
4705 }
4706 }
4707
4708 fn current_code_is_unnecessary(&self) -> bool {
4709 self.unnecessary_depth > 0
4710 }
4711}
4712
4713impl<'a> Iterator for BufferChunks<'a> {
4714 type Item = Chunk<'a>;
4715
4716 fn next(&mut self) -> Option<Self::Item> {
4717 let mut next_capture_start = usize::MAX;
4718 let mut next_diagnostic_endpoint = usize::MAX;
4719
4720 if let Some(highlights) = self.highlights.as_mut() {
4721 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4722 if *parent_capture_end <= self.range.start {
4723 highlights.stack.pop();
4724 } else {
4725 break;
4726 }
4727 }
4728
4729 if highlights.next_capture.is_none() {
4730 highlights.next_capture = highlights.captures.next();
4731 }
4732
4733 while let Some(capture) = highlights.next_capture.as_ref() {
4734 if self.range.start < capture.node.start_byte() {
4735 next_capture_start = capture.node.start_byte();
4736 break;
4737 } else {
4738 let highlight_id =
4739 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4740 highlights
4741 .stack
4742 .push((capture.node.end_byte(), highlight_id));
4743 highlights.next_capture = highlights.captures.next();
4744 }
4745 }
4746 }
4747
4748 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4749 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4750 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4751 if endpoint.offset <= self.range.start {
4752 self.update_diagnostic_depths(endpoint);
4753 diagnostic_endpoints.next();
4754 self.underline = endpoint.underline;
4755 } else {
4756 next_diagnostic_endpoint = endpoint.offset;
4757 break;
4758 }
4759 }
4760 }
4761 self.diagnostic_endpoints = diagnostic_endpoints;
4762
4763 if let Some(chunk) = self.chunks.peek() {
4764 let chunk_start = self.range.start;
4765 let mut chunk_end = (self.chunks.offset() + chunk.len())
4766 .min(next_capture_start)
4767 .min(next_diagnostic_endpoint);
4768 let mut highlight_id = None;
4769 if let Some(highlights) = self.highlights.as_ref()
4770 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4771 chunk_end = chunk_end.min(*parent_capture_end);
4772 highlight_id = Some(*parent_highlight_id);
4773 }
4774
4775 let slice =
4776 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4777 self.range.start = chunk_end;
4778 if self.range.start == self.chunks.offset() + chunk.len() {
4779 self.chunks.next().unwrap();
4780 }
4781
4782 Some(Chunk {
4783 text: slice,
4784 syntax_highlight_id: highlight_id,
4785 underline: self.underline,
4786 diagnostic_severity: self.current_diagnostic_severity(),
4787 is_unnecessary: self.current_code_is_unnecessary(),
4788 ..Chunk::default()
4789 })
4790 } else {
4791 None
4792 }
4793 }
4794}
4795
4796impl operation_queue::Operation for Operation {
4797 fn lamport_timestamp(&self) -> clock::Lamport {
4798 match self {
4799 Operation::Buffer(_) => {
4800 unreachable!("buffer operations should never be deferred at this layer")
4801 }
4802 Operation::UpdateDiagnostics {
4803 lamport_timestamp, ..
4804 }
4805 | Operation::UpdateSelections {
4806 lamport_timestamp, ..
4807 }
4808 | Operation::UpdateCompletionTriggers {
4809 lamport_timestamp, ..
4810 } => *lamport_timestamp,
4811 }
4812 }
4813}
4814
4815impl Default for Diagnostic {
4816 fn default() -> Self {
4817 Self {
4818 source: Default::default(),
4819 source_kind: DiagnosticSourceKind::Other,
4820 code: None,
4821 code_description: None,
4822 severity: DiagnosticSeverity::ERROR,
4823 message: Default::default(),
4824 markdown: None,
4825 group_id: 0,
4826 is_primary: false,
4827 is_disk_based: false,
4828 is_unnecessary: false,
4829 underline: true,
4830 data: None,
4831 }
4832 }
4833}
4834
4835impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4837 pub fn spaces(len: u32) -> Self {
4838 Self {
4839 len,
4840 kind: IndentKind::Space,
4841 }
4842 }
4843
4844 /// Returns an [`IndentSize`] representing a tab.
4845 pub fn tab() -> Self {
4846 Self {
4847 len: 1,
4848 kind: IndentKind::Tab,
4849 }
4850 }
4851
4852 /// An iterator over the characters represented by this [`IndentSize`].
4853 pub fn chars(&self) -> impl Iterator<Item = char> {
4854 iter::repeat(self.char()).take(self.len as usize)
4855 }
4856
4857 /// The character representation of this [`IndentSize`].
4858 pub fn char(&self) -> char {
4859 match self.kind {
4860 IndentKind::Space => ' ',
4861 IndentKind::Tab => '\t',
4862 }
4863 }
4864
4865 /// Consumes the current [`IndentSize`] and returns a new one that has
4866 /// been shrunk or enlarged by the given size along the given direction.
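    ///
    /// A couple of concrete cases (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing four spaces by four more spaces yields eight spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    ///
    /// // Shrinking four spaces by a tab leaves the size unchanged, because the kinds differ.
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab());
    /// assert_eq!(shrunk.len, 4);
    /// ```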
4867 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4868 match direction {
4869 Ordering::Less => {
4870 if self.kind == size.kind && self.len >= size.len {
4871 self.len -= size.len;
4872 }
4873 }
4874 Ordering::Equal => {}
4875 Ordering::Greater => {
4876 if self.len == 0 {
4877 self = size;
4878 } else if self.kind == size.kind {
4879 self.len += size.len;
4880 }
4881 }
4882 }
4883 self
4884 }
4885
4886 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4887 match self.kind {
4888 IndentKind::Space => self.len as usize,
4889 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4890 }
4891 }
4892}
4893
4894#[cfg(any(test, feature = "test-support"))]
4895pub struct TestFile {
4896 pub path: Arc<Path>,
4897 pub root_name: String,
4898 pub local_root: Option<PathBuf>,
4899}
4900
4901#[cfg(any(test, feature = "test-support"))]
4902impl File for TestFile {
4903 fn path(&self) -> &Arc<Path> {
4904 &self.path
4905 }
4906
4907 fn full_path(&self, _: &gpui::App) -> PathBuf {
4908 PathBuf::from(&self.root_name).join(self.path.as_ref())
4909 }
4910
4911 fn as_local(&self) -> Option<&dyn LocalFile> {
4912 if self.local_root.is_some() {
4913 Some(self)
4914 } else {
4915 None
4916 }
4917 }
4918
4919 fn disk_state(&self) -> DiskState {
4920 unimplemented!()
4921 }
4922
4923 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4924 self.path().file_name().unwrap_or(self.root_name.as_ref())
4925 }
4926
4927 fn worktree_id(&self, _: &App) -> WorktreeId {
4928 WorktreeId::from_usize(0)
4929 }
4930
4931 fn to_proto(&self, _: &App) -> rpc::proto::File {
4932 unimplemented!()
4933 }
4934
4935 fn is_private(&self) -> bool {
4936 false
4937 }
4938}
4939
4940#[cfg(any(test, feature = "test-support"))]
4941impl LocalFile for TestFile {
4942 fn abs_path(&self, _cx: &App) -> PathBuf {
4943 PathBuf::from(self.local_root.as_ref().unwrap())
4944 .join(&self.root_name)
4945 .join(self.path.as_ref())
4946 }
4947
4948 fn load(&self, _cx: &App) -> Task<Result<String>> {
4949 unimplemented!()
4950 }
4951
4952 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4953 unimplemented!()
4954 }
4955}
4956
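/// Groups consecutive values from the iterator into contiguous ranges, where
/// each range contains at most `max_len` values.
///
/// A small concrete example (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 10].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 10..11]);
/// ```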
4957pub(crate) fn contiguous_ranges(
4958 values: impl Iterator<Item = u32>,
4959 max_len: usize,
4960) -> impl Iterator<Item = Range<u32>> {
4961 let mut values = values;
4962 let mut current_range: Option<Range<u32>> = None;
4963 std::iter::from_fn(move || {
4964 loop {
4965 if let Some(value) = values.next() {
4966 if let Some(range) = &mut current_range
4967 && value == range.end && range.len() < max_len {
4968 range.end += 1;
4969 continue;
4970 }
4971
4972 let prev_range = current_range.clone();
4973 current_range = Some(value..(value + 1));
4974 if prev_range.is_some() {
4975 return prev_range;
4976 }
4977 } else {
4978 return current_range.take();
4979 }
4980 }
4981 })
4982}
4983
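/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-specific word characters into account.
///
/// A small concrete example (not compiled as a doctest); with no language
/// scope, only alphanumerics and `_` count as word characters:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```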
4984#[derive(Default, Debug)]
4985pub struct CharClassifier {
4986 scope: Option<LanguageScope>,
4987 for_completion: bool,
4988 ignore_punctuation: bool,
4989}
4990
4991impl CharClassifier {
4992 pub fn new(scope: Option<LanguageScope>) -> Self {
4993 Self {
4994 scope,
4995 for_completion: false,
4996 ignore_punctuation: false,
4997 }
4998 }
4999
5000 pub fn for_completion(self, for_completion: bool) -> Self {
5001 Self {
5002 for_completion,
5003 ..self
5004 }
5005 }
5006
5007 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5008 Self {
5009 ignore_punctuation,
5010 ..self
5011 }
5012 }
5013
5014 pub fn is_whitespace(&self, c: char) -> bool {
5015 self.kind(c) == CharKind::Whitespace
5016 }
5017
5018 pub fn is_word(&self, c: char) -> bool {
5019 self.kind(c) == CharKind::Word
5020 }
5021
5022 pub fn is_punctuation(&self, c: char) -> bool {
5023 self.kind(c) == CharKind::Punctuation
5024 }
5025
5026 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5027 if c.is_alphanumeric() || c == '_' {
5028 return CharKind::Word;
5029 }
5030
5031 if let Some(scope) = &self.scope {
5032 let characters = if self.for_completion {
5033 scope.completion_query_characters()
5034 } else {
5035 scope.word_characters()
5036 };
5037 if let Some(characters) = characters
5038 && characters.contains(&c) {
5039 return CharKind::Word;
5040 }
5041 }
5042
5043 if c.is_whitespace() {
5044 return CharKind::Whitespace;
5045 }
5046
5047 if ignore_punctuation {
5048 CharKind::Word
5049 } else {
5050 CharKind::Punctuation
5051 }
5052 }
5053
5054 pub fn kind(&self, c: char) -> CharKind {
5055 self.kind_with(c, self.ignore_punctuation)
5056 }
5057}
5058
5059/// Find all of the ranges of whitespace that occur at the ends of lines
5060/// in the given rope.
5061///
5062/// This could also be done with a regex search, but this implementation
5063/// avoids copying text.
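///
/// A small concrete example (not compiled as a doctest); it assumes `Rope`
/// can be constructed from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\t\n");
/// // The trailing spaces on line one and the trailing tabs on line two are reported.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..18]);
/// ```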
5064pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5065 let mut ranges = Vec::new();
5066
5067 let mut offset = 0;
5068 let mut prev_chunk_trailing_whitespace_range = 0..0;
5069 for chunk in rope.chunks() {
5070 let mut prev_line_trailing_whitespace_range = 0..0;
5071 for (i, line) in chunk.split('\n').enumerate() {
5072 let line_end_offset = offset + line.len();
5073 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5074 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5075
5076 if i == 0 && trimmed_line_len == 0 {
5077 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5078 }
5079 if !prev_line_trailing_whitespace_range.is_empty() {
5080 ranges.push(prev_line_trailing_whitespace_range);
5081 }
5082
5083 offset = line_end_offset + 1;
5084 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5085 }
5086
5087 offset -= 1;
5088 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5089 }
5090
5091 if !prev_chunk_trailing_whitespace_range.is_empty() {
5092 ranges.push(prev_chunk_trailing_whitespace_range);
5093 }
5094
5095 ranges
5096}