1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::{SettingsUi, WorktreeId};
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
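///
/// For example, a line indented with four spaces is represented as
/// `IndentSize { len: 4, kind: IndentKind::Space }`.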
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(
177 Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
178)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underline,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<NumberOrString>,
207 pub code_description: Option<lsp::Uri>,
208 /// Whether this diagnostic is a hint, warning, or error.
209 pub severity: DiagnosticSeverity,
210 /// The human-readable message associated with this diagnostic.
211 pub message: String,
    /// The human-readable message, in Markdown format, if available.
213 pub markdown: Option<String>,
214 /// An id that identifies the group to which this diagnostic belongs.
215 ///
216 /// When a language server produces a diagnostic with
217 /// one or more associated diagnostics, those diagnostics are all
218 /// assigned a single group ID.
219 pub group_id: usize,
220 /// Whether this diagnostic is the primary diagnostic for its group.
221 ///
222 /// In a given group, the primary diagnostic is the top-level diagnostic
223 /// returned by the language server. The non-primary diagnostics are the
224 /// associated diagnostics.
225 pub is_primary: bool,
226 /// Whether this diagnostic is considered to originate from an analysis of
227 /// files on disk, as opposed to any unsaved buffer contents. This is a
228 /// property of a given diagnostic source, and is configured for a given
229 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
230 /// for the language server.
231 pub is_disk_based: bool,
232 /// Whether this diagnostic marks unnecessary code.
233 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
235 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
237 pub data: Option<Value>,
238 /// Whether to underline the corresponding text range in the editor.
239 pub underline: bool,
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
243pub enum DiagnosticSourceKind {
244 Pulled,
245 Pushed,
246 Other,
247}
248
249/// An operation used to synchronize this buffer with its other replicas.
250#[derive(Clone, Debug, PartialEq)]
251pub enum Operation {
252 /// A text operation.
253 Buffer(text::Operation),
254
255 /// An update to the buffer's diagnostics.
256 UpdateDiagnostics {
257 /// The id of the language server that produced the new diagnostics.
258 server_id: LanguageServerId,
259 /// The diagnostics.
260 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
261 /// The buffer's lamport timestamp.
262 lamport_timestamp: clock::Lamport,
263 },
264
265 /// An update to the most recent selections in this buffer.
266 UpdateSelections {
267 /// The selections.
268 selections: Arc<[Selection<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// Whether the selections are in 'line mode'.
272 line_mode: bool,
273 /// The [`CursorShape`] associated with these selections.
274 cursor_shape: CursorShape,
275 },
276
277 /// An update to the characters that should trigger autocompletion
278 /// for this buffer.
279 UpdateCompletionTriggers {
280 /// The characters that trigger autocompletion.
281 triggers: Vec<String>,
282 /// The buffer's lamport timestamp.
283 lamport_timestamp: clock::Lamport,
284 /// The language server ID.
285 server_id: LanguageServerId,
286 },
287}
288
289/// An event that occurs in a buffer.
290#[derive(Clone, Debug, PartialEq)]
291pub enum BufferEvent {
292 /// The buffer was changed in a way that must be
293 /// propagated to its other replicas.
294 Operation {
295 operation: Operation,
296 is_local: bool,
297 },
298 /// The buffer was edited.
299 Edited,
300 /// The buffer's `dirty` bit changed.
301 DirtyChanged,
302 /// The buffer was saved.
303 Saved,
304 /// The buffer's file was changed on disk.
305 FileHandleChanged,
306 /// The buffer was reloaded.
307 Reloaded,
    /// The buffer needs to be reloaded.
309 ReloadNeeded,
310 /// The buffer's language was changed.
311 LanguageChanged,
312 /// The buffer's syntax trees were updated.
313 Reparsed,
314 /// The buffer's diagnostics were updated.
315 DiagnosticsUpdated,
316 /// The buffer gained or lost editing capabilities.
317 CapabilityChanged,
318}
319
320/// The file associated with a buffer.
321pub trait File: Send + Sync + Any {
322 /// Returns the [`LocalFile`] associated with this file, if the
323 /// file is local.
324 fn as_local(&self) -> Option<&dyn LocalFile>;
325
326 /// Returns whether this file is local.
327 fn is_local(&self) -> bool {
328 self.as_local().is_some()
329 }
330
331 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
332 /// only available in some states, such as modification time.
333 fn disk_state(&self) -> DiskState;
334
335 /// Returns the path of this file relative to the worktree's root directory.
336 fn path(&self) -> &Arc<Path>;
337
338 /// Returns the path of this file relative to the worktree's parent directory (this means it
339 /// includes the name of the worktree's root folder).
340 fn full_path(&self, cx: &App) -> PathBuf;
341
342 /// Returns the last component of this handle's absolute path. If this handle refers to the root
343 /// of its worktree, then this method will return the name of the worktree itself.
344 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
345
346 /// Returns the id of the worktree to which this file belongs.
347 ///
348 /// This is needed for looking up project-specific settings.
349 fn worktree_id(&self, cx: &App) -> WorktreeId;
350
351 /// Converts this file into a protobuf message.
352 fn to_proto(&self, cx: &App) -> rpc::proto::File;
353
    /// Returns whether Zed considers this to be a private file.
355 fn is_private(&self) -> bool;
356}
357
358/// The file's storage status - whether it's stored (`Present`), and if so when it was last
359/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
360/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
361/// indicator for new files.
362#[derive(Copy, Clone, Debug, PartialEq)]
363pub enum DiskState {
364 /// File created in Zed that has not been saved.
365 New,
366 /// File present on the filesystem.
367 Present { mtime: MTime },
368 /// Deleted file that was previously present.
369 Deleted,
370}
371
372impl DiskState {
373 /// Returns the file's last known modification time on disk.
374 pub fn mtime(self) -> Option<MTime> {
375 match self {
376 DiskState::New => None,
377 DiskState::Present { mtime } => Some(mtime),
378 DiskState::Deleted => None,
379 }
380 }
381
382 pub fn exists(&self) -> bool {
383 match self {
384 DiskState::New => false,
385 DiskState::Present { .. } => true,
386 DiskState::Deleted => false,
387 }
388 }
389}
390
391/// The file associated with a buffer, in the case where the file is on the local disk.
392pub trait LocalFile: File {
    /// Returns the absolute path of this file.
394 fn abs_path(&self, cx: &App) -> PathBuf;
395
396 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
397 fn load(&self, cx: &App) -> Task<Result<String>>;
398
399 /// Loads the file's contents from disk.
400 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
401}
402
403/// The auto-indent behavior associated with an editing operation.
404/// For some editing operations, each affected line of text has its
405/// indentation recomputed. For other operations, the entire block
406/// of edited text is adjusted uniformly.
407#[derive(Clone, Debug)]
408pub enum AutoindentMode {
409 /// Indent each line of inserted text.
410 EachLine,
411 /// Apply the same indentation adjustment to all of the lines
412 /// in a given insertion.
413 Block {
414 /// The original indentation column of the first line of each
415 /// insertion, if it has been copied.
416 ///
417 /// Knowing this makes it possible to preserve the relative indentation
418 /// of every line in the insertion from when it was copied.
419 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will be shifted by `b - a` columns.
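        ///
        /// For example (hypothetical numbers): if a block was copied from
        /// column 4 (`a = 4`) and its first line is auto-indented to column 8
        /// (`b = 8`), every other line of the insertion is shifted right by
        /// `b - a = 4` columns.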
423 original_indent_columns: Vec<Option<u32>>,
424 },
425}
426
427#[derive(Clone)]
428struct AutoindentRequest {
429 before_edit: BufferSnapshot,
430 entries: Vec<AutoindentRequestEntry>,
431 is_block_mode: bool,
432 ignore_empty_lines: bool,
433}
434
435#[derive(Debug, Clone)]
436struct AutoindentRequestEntry {
437 /// A range of the buffer whose indentation should be adjusted.
438 range: Range<Anchor>,
439 /// Whether or not these lines should be considered brand new, for the
440 /// purpose of auto-indent. When text is not new, its indentation will
441 /// only be adjusted if the suggested indentation level has *changed*
442 /// since the edit was made.
443 first_line_is_new: bool,
444 indent_size: IndentSize,
445 original_indent_column: Option<u32>,
446}
447
448#[derive(Debug)]
449struct IndentSuggestion {
450 basis_row: u32,
451 delta: Ordering,
452 within_error: bool,
453}
454
455struct BufferChunkHighlights<'a> {
456 captures: SyntaxMapCaptures<'a>,
457 next_capture: Option<SyntaxMapCapture<'a>>,
458 stack: Vec<(usize, HighlightId)>,
459 highlight_maps: Vec<HighlightMap>,
460}
461
462/// An iterator that yields chunks of a buffer's text, along with their
463/// syntax highlights and diagnostic status.
464pub struct BufferChunks<'a> {
465 buffer_snapshot: Option<&'a BufferSnapshot>,
466 range: Range<usize>,
467 chunks: text::Chunks<'a>,
468 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
469 error_depth: usize,
470 warning_depth: usize,
471 information_depth: usize,
472 hint_depth: usize,
473 unnecessary_depth: usize,
474 underline: bool,
475 highlights: Option<BufferChunkHighlights<'a>>,
476}
477
478/// A chunk of a buffer's text, along with its syntax highlight and
479/// diagnostic status.
480#[derive(Clone, Debug, Default)]
481pub struct Chunk<'a> {
482 /// The text of the chunk.
483 pub text: &'a str,
484 /// The syntax highlighting style of the chunk.
485 pub syntax_highlight_id: Option<HighlightId>,
486 /// The highlight style that has been applied to this chunk in
487 /// the editor.
488 pub highlight_style: Option<HighlightStyle>,
489 /// The severity of diagnostic associated with this chunk, if any.
490 pub diagnostic_severity: Option<DiagnosticSeverity>,
491 /// Whether this chunk of text is marked as unnecessary.
492 pub is_unnecessary: bool,
493 /// Whether this chunk of text was originally a tab character.
494 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay rather than the buffer itself.
    pub is_inlay: bool,
497 /// Whether to underline the corresponding text range in the editor.
498 pub underline: bool,
499}
500
501/// A set of edits to a given version of a buffer, computed asynchronously.
502#[derive(Debug)]
503pub struct Diff {
504 pub base_version: clock::Global,
505 pub line_ending: LineEnding,
506 pub edits: Vec<(Range<usize>, Arc<str>)>,
507}
508
509#[derive(Debug, Clone, Copy)]
510pub(crate) struct DiagnosticEndpoint {
511 offset: usize,
512 is_start: bool,
513 underline: bool,
514 severity: DiagnosticSeverity,
515 is_unnecessary: bool,
516}
517
518/// A class of characters, used for characterizing a run of text.
519#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
520pub enum CharKind {
521 /// Whitespace.
522 Whitespace,
523 /// Punctuation.
524 Punctuation,
525 /// Word.
526 Word,
527}
528
/// A runnable is a set of data about a buffer region that can be resolved into a task.
530pub struct Runnable {
531 pub tags: SmallVec<[RunnableTag; 1]>,
532 pub language: Arc<Language>,
533 pub buffer: BufferId,
534}
535
536#[derive(Default, Clone, Debug)]
537pub struct HighlightedText {
538 pub text: SharedString,
539 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
540}
541
542#[derive(Default, Debug)]
543struct HighlightedTextBuilder {
544 pub text: String,
545 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
546}
547
548impl HighlightedText {
549 pub fn from_buffer_range<T: ToOffset>(
550 range: Range<T>,
551 snapshot: &text::BufferSnapshot,
552 syntax_snapshot: &SyntaxSnapshot,
553 override_style: Option<HighlightStyle>,
554 syntax_theme: &SyntaxTheme,
555 ) -> Self {
556 let mut highlighted_text = HighlightedTextBuilder::default();
557 highlighted_text.add_text_from_buffer_range(
558 range,
559 snapshot,
560 syntax_snapshot,
561 override_style,
562 syntax_theme,
563 );
564 highlighted_text.build()
565 }
566
567 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
568 gpui::StyledText::new(self.text.clone())
569 .with_default_highlights(default_style, self.highlights.iter().cloned())
570 }
571
    /// Returns the first line, with leading whitespace trimmed (unless a highlight
    /// starts within that whitespace), along with a boolean indicating whether any
    /// lines follow it.
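    ///
    /// For example, for the text `"    let x = 1;\nlet y = 2;"` with no
    /// highlights, this returns a preview containing `"let x = 1;"` along with
    /// `true`, since another line follows.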
574 pub fn first_line_preview(self) -> (Self, bool) {
575 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
576 let first_line = &self.text[..newline_ix];
577
578 // Trim leading whitespace, unless an edit starts prior to it.
579 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
580 if let Some((first_highlight_range, _)) = self.highlights.first() {
581 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
582 }
583
584 let preview_text = &first_line[preview_start_ix..];
585 let preview_highlights = self
586 .highlights
587 .into_iter()
588 .take_while(|(range, _)| range.start < newline_ix)
589 .filter_map(|(mut range, highlight)| {
590 range.start = range.start.saturating_sub(preview_start_ix);
591 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
592 if range.is_empty() {
593 None
594 } else {
595 Some((range, highlight))
596 }
597 });
598
599 let preview = Self {
600 text: SharedString::new(preview_text),
601 highlights: preview_highlights.collect(),
602 };
603
604 (preview, self.text.len() > newline_ix)
605 }
606}
607
608impl HighlightedTextBuilder {
609 pub fn build(self) -> HighlightedText {
610 HighlightedText {
611 text: self.text.into(),
612 highlights: self.highlights,
613 }
614 }
615
616 pub fn add_text_from_buffer_range<T: ToOffset>(
617 &mut self,
618 range: Range<T>,
619 snapshot: &text::BufferSnapshot,
620 syntax_snapshot: &SyntaxSnapshot,
621 override_style: Option<HighlightStyle>,
622 syntax_theme: &SyntaxTheme,
623 ) {
624 let range = range.to_offset(snapshot);
625 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
626 let start = self.text.len();
627 self.text.push_str(chunk.text);
628 let end = self.text.len();
629
630 if let Some(mut highlight_style) = chunk
631 .syntax_highlight_id
632 .and_then(|id| id.style(syntax_theme))
633 {
634 if let Some(override_style) = override_style {
635 highlight_style.highlight(override_style);
636 }
637 self.highlights.push((start..end, highlight_style));
638 } else if let Some(override_style) = override_style {
639 self.highlights.push((start..end, override_style));
640 }
641 }
642 }
643
644 fn highlighted_chunks<'a>(
645 range: Range<usize>,
646 snapshot: &'a text::BufferSnapshot,
647 syntax_snapshot: &'a SyntaxSnapshot,
648 ) -> BufferChunks<'a> {
649 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
650 grammar.highlights_query.as_ref()
651 });
652
653 let highlight_maps = captures
654 .grammars()
655 .iter()
656 .map(|grammar| grammar.highlight_map())
657 .collect();
658
659 BufferChunks::new(
660 snapshot.as_rope(),
661 range,
662 Some((captures, highlight_maps)),
663 false,
664 None,
665 )
666 }
667}
668
669#[derive(Clone)]
670pub struct EditPreview {
671 old_snapshot: text::BufferSnapshot,
672 applied_edits_snapshot: text::BufferSnapshot,
673 syntax_snapshot: SyntaxSnapshot,
674}
675
676impl EditPreview {
677 pub fn highlight_edits(
678 &self,
679 current_snapshot: &BufferSnapshot,
680 edits: &[(Range<Anchor>, String)],
681 include_deletions: bool,
682 cx: &App,
683 ) -> HighlightedText {
684 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
685 return HighlightedText::default();
686 };
687
688 let mut highlighted_text = HighlightedTextBuilder::default();
689
690 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
691
692 let insertion_highlight_style = HighlightStyle {
693 background_color: Some(cx.theme().status().created_background),
694 ..Default::default()
695 };
696 let deletion_highlight_style = HighlightStyle {
697 background_color: Some(cx.theme().status().deleted_background),
698 ..Default::default()
699 };
700 let syntax_theme = cx.theme().syntax();
701
702 for (range, edit_text) in edits {
703 let edit_new_end_in_preview_snapshot = range
704 .end
705 .bias_right(&self.old_snapshot)
706 .to_offset(&self.applied_edits_snapshot);
707 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
708
709 let unchanged_range_in_preview_snapshot =
710 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
711 if !unchanged_range_in_preview_snapshot.is_empty() {
712 highlighted_text.add_text_from_buffer_range(
713 unchanged_range_in_preview_snapshot,
714 &self.applied_edits_snapshot,
715 &self.syntax_snapshot,
716 None,
717 syntax_theme,
718 );
719 }
720
721 let range_in_current_snapshot = range.to_offset(current_snapshot);
722 if include_deletions && !range_in_current_snapshot.is_empty() {
723 highlighted_text.add_text_from_buffer_range(
724 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
727 Some(deletion_highlight_style),
728 syntax_theme,
729 );
730 }
731
732 if !edit_text.is_empty() {
733 highlighted_text.add_text_from_buffer_range(
734 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
735 &self.applied_edits_snapshot,
736 &self.syntax_snapshot,
737 Some(insertion_highlight_style),
738 syntax_theme,
739 );
740 }
741
742 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
743 }
744
745 highlighted_text.add_text_from_buffer_range(
746 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
747 &self.applied_edits_snapshot,
748 &self.syntax_snapshot,
749 None,
750 syntax_theme,
751 );
752
753 highlighted_text.build()
754 }
755
756 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
757 let (first, _) = edits.first()?;
758 let (last, _) = edits.last()?;
759
760 let start = first
761 .start
762 .bias_left(&self.old_snapshot)
763 .to_point(&self.applied_edits_snapshot);
764 let end = last
765 .end
766 .bias_right(&self.old_snapshot)
767 .to_point(&self.applied_edits_snapshot);
768
769 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
770 let range = Point::new(start.row, 0)
771 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
772
773 Some(range.to_offset(&self.applied_edits_snapshot))
774 }
775}
776
777#[derive(Clone, Debug, PartialEq, Eq)]
778pub struct BracketMatch {
779 pub open_range: Range<usize>,
780 pub close_range: Range<usize>,
781 pub newline_only: bool,
782}
783
784impl Buffer {
785 /// Create a new buffer with the given base text.
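    ///
    /// A minimal usage sketch (not a doctest; assumes a gpui context `cx` is
    /// in scope):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```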
786 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
787 Self::build(
788 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
789 None,
790 Capability::ReadWrite,
791 )
792 }
793
794 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
795 pub fn local_normalized(
796 base_text_normalized: Rope,
797 line_ending: LineEnding,
798 cx: &Context<Self>,
799 ) -> Self {
800 Self::build(
801 TextBuffer::new_normalized(
802 0,
803 cx.entity_id().as_non_zero_u64().into(),
804 line_ending,
805 base_text_normalized,
806 ),
807 None,
808 Capability::ReadWrite,
809 )
810 }
811
812 /// Create a new buffer that is a replica of a remote buffer.
813 pub fn remote(
814 remote_id: BufferId,
815 replica_id: ReplicaId,
816 capability: Capability,
817 base_text: impl Into<String>,
818 ) -> Self {
819 Self::build(
820 TextBuffer::new(replica_id, remote_id, base_text.into()),
821 None,
822 capability,
823 )
824 }
825
826 /// Create a new buffer that is a replica of a remote buffer, populating its
827 /// state from the given protobuf message.
828 pub fn from_proto(
829 replica_id: ReplicaId,
830 capability: Capability,
831 message: proto::BufferState,
832 file: Option<Arc<dyn File>>,
833 ) -> Result<Self> {
834 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
835 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
836 let mut this = Self::build(buffer, file, capability);
837 this.text.set_line_ending(proto::deserialize_line_ending(
838 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
839 ));
840 this.saved_version = proto::deserialize_version(&message.saved_version);
841 this.saved_mtime = message.saved_mtime.map(|time| time.into());
842 Ok(this)
843 }
844
845 /// Serialize the buffer's state to a protobuf message.
846 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
847 proto::BufferState {
848 id: self.remote_id().into(),
849 file: self.file.as_ref().map(|f| f.to_proto(cx)),
850 base_text: self.base_text().to_string(),
851 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
852 saved_version: proto::serialize_version(&self.saved_version),
853 saved_mtime: self.saved_mtime.map(|time| time.into()),
854 }
855 }
856
857 /// Serialize as protobufs all of the changes to the buffer since the given version.
858 pub fn serialize_ops(
859 &self,
860 since: Option<clock::Global>,
861 cx: &App,
862 ) -> Task<Vec<proto::Operation>> {
863 let mut operations = Vec::new();
864 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
865
866 operations.extend(self.remote_selections.iter().map(|(_, set)| {
867 proto::serialize_operation(&Operation::UpdateSelections {
868 selections: set.selections.clone(),
869 lamport_timestamp: set.lamport_timestamp,
870 line_mode: set.line_mode,
871 cursor_shape: set.cursor_shape,
872 })
873 }));
874
875 for (server_id, diagnostics) in &self.diagnostics {
876 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
877 lamport_timestamp: self.diagnostics_timestamp,
878 server_id: *server_id,
879 diagnostics: diagnostics.iter().cloned().collect(),
880 }));
881 }
882
883 for (server_id, completions) in &self.completion_triggers_per_language_server {
884 operations.push(proto::serialize_operation(
885 &Operation::UpdateCompletionTriggers {
886 triggers: completions.iter().cloned().collect(),
887 lamport_timestamp: self.completion_triggers_timestamp,
888 server_id: *server_id,
889 },
890 ));
891 }
892
893 let text_operations = self.text.operations().clone();
894 cx.background_spawn(async move {
895 let since = since.unwrap_or_default();
896 operations.extend(
897 text_operations
898 .iter()
899 .filter(|(_, op)| !since.observed(op.timestamp()))
900 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
901 );
902 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
903 operations
904 })
905 }
906
907 /// Assign a language to the buffer, returning the buffer.
908 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
909 self.set_language(Some(language), cx);
910 self
911 }
912
913 /// Returns the [`Capability`] of this buffer.
914 pub fn capability(&self) -> Capability {
915 self.capability
916 }
917
918 /// Whether this buffer can only be read.
919 pub fn read_only(&self) -> bool {
920 self.capability == Capability::ReadOnly
921 }
922
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
924 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
925 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
926 let snapshot = buffer.snapshot();
927 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
928 Self {
929 saved_mtime,
930 saved_version: buffer.version(),
931 preview_version: buffer.version(),
932 reload_task: None,
933 transaction_depth: 0,
934 was_dirty_before_starting_transaction: None,
935 has_unsaved_edits: Cell::new((buffer.version(), false)),
936 text: buffer,
937 branch_state: None,
938 file,
939 capability,
940 syntax_map,
941 reparse: None,
942 non_text_state_update_count: 0,
943 sync_parse_timeout: Duration::from_millis(1),
944 parse_status: watch::channel(ParseStatus::Idle),
945 autoindent_requests: Default::default(),
946 wait_for_autoindent_txs: Default::default(),
947 pending_autoindent: Default::default(),
948 language: None,
949 remote_selections: Default::default(),
950 diagnostics: Default::default(),
951 diagnostics_timestamp: Default::default(),
952 completion_triggers: Default::default(),
953 completion_triggers_per_language_server: Default::default(),
954 completion_triggers_timestamp: Default::default(),
955 deferred_ops: OperationQueue::new(),
956 has_conflict: false,
957 change_bits: Default::default(),
958 _subscriptions: Vec::new(),
959 }
960 }
961
962 pub fn build_snapshot(
963 text: Rope,
964 language: Option<Arc<Language>>,
965 language_registry: Option<Arc<LanguageRegistry>>,
966 cx: &mut App,
967 ) -> impl Future<Output = BufferSnapshot> + use<> {
968 let entity_id = cx.reserve_entity::<Self>().entity_id();
969 let buffer_id = entity_id.as_non_zero_u64().into();
970 async move {
971 let text =
972 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
973 let mut syntax = SyntaxMap::new(&text).snapshot();
974 if let Some(language) = language.clone() {
975 let language_registry = language_registry.clone();
976 syntax.reparse(&text, language_registry, language);
977 }
978 BufferSnapshot {
979 text,
980 syntax,
981 file: None,
982 diagnostics: Default::default(),
983 remote_selections: Default::default(),
984 language,
985 non_text_state_update_count: 0,
986 }
987 }
988 }
989
990 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
991 let entity_id = cx.reserve_entity::<Self>().entity_id();
992 let buffer_id = entity_id.as_non_zero_u64().into();
993 let text =
994 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
995 let syntax = SyntaxMap::new(&text).snapshot();
996 BufferSnapshot {
997 text,
998 syntax,
999 file: None,
1000 diagnostics: Default::default(),
1001 remote_selections: Default::default(),
1002 language: None,
1003 non_text_state_update_count: 0,
1004 }
1005 }
1006
1007 #[cfg(any(test, feature = "test-support"))]
1008 pub fn build_snapshot_sync(
1009 text: Rope,
1010 language: Option<Arc<Language>>,
1011 language_registry: Option<Arc<LanguageRegistry>>,
1012 cx: &mut App,
1013 ) -> BufferSnapshot {
1014 let entity_id = cx.reserve_entity::<Self>().entity_id();
1015 let buffer_id = entity_id.as_non_zero_u64().into();
1016 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1017 let mut syntax = SyntaxMap::new(&text).snapshot();
1018 if let Some(language) = language.clone() {
1019 syntax.reparse(&text, language_registry, language);
1020 }
1021 BufferSnapshot {
1022 text,
1023 syntax,
1024 file: None,
1025 diagnostics: Default::default(),
1026 remote_selections: Default::default(),
1027 language,
1028 non_text_state_update_count: 0,
1029 }
1030 }
1031
1032 /// Retrieve a snapshot of the buffer's current state. This is computationally
1033 /// cheap, and allows reading from the buffer on a background thread.
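    ///
    /// A usage sketch (not a doctest; assumes a gpui `App` context `cx` and an
    /// `Entity<Buffer>` named `buffer`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Reading from the snapshot does not block the main thread.
    ///     let _full_text = snapshot.text();
    /// })
    /// .detach();
    /// ```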
1034 pub fn snapshot(&self) -> BufferSnapshot {
1035 let text = self.text.snapshot();
1036 let mut syntax_map = self.syntax_map.lock();
1037 syntax_map.interpolate(&text);
1038 let syntax = syntax_map.snapshot();
1039
1040 BufferSnapshot {
1041 text,
1042 syntax,
1043 file: self.file.clone(),
1044 remote_selections: self.remote_selections.clone(),
1045 diagnostics: self.diagnostics.clone(),
1046 language: self.language.clone(),
1047 non_text_state_update_count: self.non_text_state_update_count,
1048 }
1049 }
1050
1051 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1052 let this = cx.entity();
1053 cx.new(|cx| {
1054 let mut branch = Self {
1055 branch_state: Some(BufferBranchState {
1056 base_buffer: this.clone(),
1057 merged_operations: Default::default(),
1058 }),
1059 language: self.language.clone(),
1060 has_conflict: self.has_conflict,
1061 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1062 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1063 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1064 };
1065 if let Some(language_registry) = self.language_registry() {
1066 branch.set_language_registry(language_registry);
1067 }
1068
1069 // Reparse the branch buffer so that we get syntax highlighting immediately.
1070 branch.reparse(cx);
1071
1072 branch
1073 })
1074 }
1075
1076 pub fn preview_edits(
1077 &self,
1078 edits: Arc<[(Range<Anchor>, String)]>,
1079 cx: &App,
1080 ) -> Task<EditPreview> {
1081 let registry = self.language_registry();
1082 let language = self.language().cloned();
1083 let old_snapshot = self.text.snapshot();
1084 let mut branch_buffer = self.text.branch();
1085 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1086 cx.background_spawn(async move {
1087 if !edits.is_empty() {
1088 if let Some(language) = language.clone() {
1089 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1090 }
1091
1092 branch_buffer.edit(edits.iter().cloned());
1093 let snapshot = branch_buffer.snapshot();
1094 syntax_snapshot.interpolate(&snapshot);
1095
1096 if let Some(language) = language {
1097 syntax_snapshot.reparse(&snapshot, registry, language);
1098 }
1099 }
1100 EditPreview {
1101 old_snapshot,
1102 applied_edits_snapshot: branch_buffer.snapshot(),
1103 syntax_snapshot,
1104 }
1105 })
1106 }
1107
1108 /// Applies all of the changes in this buffer that intersect any of the
1109 /// given `ranges` to its base buffer.
1110 ///
1111 /// If `ranges` is empty, then all changes will be applied. This buffer must
1112 /// be a branch buffer to call this method.
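    ///
    /// A usage sketch (not a doctest; assumes `branch` is an `Entity<Buffer>`
    /// created via [`Buffer::branch`]):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list merges all of the branch's edits.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```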
1113 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1114 let Some(base_buffer) = self.base_buffer() else {
1115 debug_panic!("not a branch buffer");
1116 return;
1117 };
1118
1119 let mut ranges = if ranges.is_empty() {
1120 &[0..usize::MAX]
1121 } else {
1122 ranges.as_slice()
1123 }
1124 .iter()
1125 .peekable();
1126
1127 let mut edits = Vec::new();
1128 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1129 let mut is_included = false;
1130 while let Some(range) = ranges.peek() {
1131 if range.end < edit.new.start {
1132 ranges.next().unwrap();
1133 } else {
1134 if range.start <= edit.new.end {
1135 is_included = true;
1136 }
1137 break;
1138 }
1139 }
1140
1141 if is_included {
1142 edits.push((
1143 edit.old.clone(),
1144 self.text_for_range(edit.new.clone()).collect::<String>(),
1145 ));
1146 }
1147 }
1148
1149 let operation = base_buffer.update(cx, |base_buffer, cx| {
1150 // cx.emit(BufferEvent::DiffBaseChanged);
1151 base_buffer.edit(edits, None, cx)
1152 });
1153
1154 if let Some(operation) = operation
1155 && let Some(BufferBranchState {
1156 merged_operations, ..
1157 }) = &mut self.branch_state
1158 {
1159 merged_operations.push(operation);
1160 }
1161 }
1162
1163 fn on_base_buffer_event(
1164 &mut self,
1165 _: Entity<Buffer>,
1166 event: &BufferEvent,
1167 cx: &mut Context<Self>,
1168 ) {
1169 let BufferEvent::Operation { operation, .. } = event else {
1170 return;
1171 };
1172 let Some(BufferBranchState {
1173 merged_operations, ..
1174 }) = &mut self.branch_state
1175 else {
1176 return;
1177 };
1178
1179 let mut operation_to_undo = None;
1180 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1181 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1182 {
1183 merged_operations.remove(ix);
1184 operation_to_undo = Some(operation.timestamp);
1185 }
1186
1187 self.apply_ops([operation.clone()], cx);
1188
1189 if let Some(timestamp) = operation_to_undo {
1190 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1191 self.undo_operations(counts, cx);
1192 }
1193 }
1194
1195 #[cfg(test)]
1196 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1197 &self.text
1198 }
1199
1200 /// Retrieve a snapshot of the buffer's raw text, without any
1201 /// language-related state like the syntax tree or diagnostics.
1202 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1203 self.text.snapshot()
1204 }
1205
1206 /// The file associated with the buffer, if any.
1207 pub fn file(&self) -> Option<&Arc<dyn File>> {
1208 self.file.as_ref()
1209 }
1210
1211 /// The version of the buffer that was last saved or reloaded from disk.
1212 pub fn saved_version(&self) -> &clock::Global {
1213 &self.saved_version
1214 }
1215
1216 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1217 pub fn saved_mtime(&self) -> Option<MTime> {
1218 self.saved_mtime
1219 }
1220
1221 /// Assign a language to the buffer.
1222 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1223 self.non_text_state_update_count += 1;
1224 self.syntax_map.lock().clear(&self.text);
1225 self.language = language;
1226 self.was_changed();
1227 self.reparse(cx);
1228 cx.emit(BufferEvent::LanguageChanged);
1229 }
1230
1231 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1232 /// other languages if parts of the buffer are written in different languages.
1233 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1234 self.syntax_map
1235 .lock()
1236 .set_language_registry(language_registry);
1237 }
1238
1239 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1240 self.syntax_map.lock().language_registry()
1241 }
1242
1243 /// Assign the buffer a new [`Capability`].
1244 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1245 if self.capability != capability {
1246 self.capability = capability;
1247 cx.emit(BufferEvent::CapabilityChanged)
1248 }
1249 }
1250
1251 /// This method is called to signal that the buffer has been saved.
1252 pub fn did_save(
1253 &mut self,
1254 version: clock::Global,
1255 mtime: Option<MTime>,
1256 cx: &mut Context<Self>,
1257 ) {
1258 self.saved_version = version;
1259 self.has_unsaved_edits
1260 .set((self.saved_version().clone(), false));
1261 self.has_conflict = false;
1262 self.saved_mtime = mtime;
1263 self.was_changed();
1264 cx.emit(BufferEvent::Saved);
1265 cx.notify();
1266 }
1267
1268 /// Reloads the contents of the buffer from disk.
1269 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1270 let (tx, rx) = futures::channel::oneshot::channel();
1271 let prev_version = self.text.version();
1272 self.reload_task = Some(cx.spawn(async move |this, cx| {
1273 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1274 let file = this.file.as_ref()?.as_local()?;
1275
1276 Some((file.disk_state().mtime(), file.load(cx)))
1277 })?
1278 else {
1279 return Ok(());
1280 };
1281
1282 let new_text = new_text.await?;
1283 let diff = this
1284 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1285 .await;
1286 this.update(cx, |this, cx| {
1287 if this.version() == diff.base_version {
1288 this.finalize_last_transaction();
1289 this.apply_diff(diff, cx);
1290 tx.send(this.finalize_last_transaction().cloned()).ok();
1291 this.has_conflict = false;
1292 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1293 } else {
1294 if !diff.edits.is_empty()
1295 || this
1296 .edits_since::<usize>(&diff.base_version)
1297 .next()
1298 .is_some()
1299 {
1300 this.has_conflict = true;
1301 }
1302
1303 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1304 }
1305
1306 this.reload_task.take();
1307 })
1308 }));
1309 rx
1310 }
1311
1312 /// This method is called to signal that the buffer has been reloaded.
1313 pub fn did_reload(
1314 &mut self,
1315 version: clock::Global,
1316 line_ending: LineEnding,
1317 mtime: Option<MTime>,
1318 cx: &mut Context<Self>,
1319 ) {
1320 self.saved_version = version;
1321 self.has_unsaved_edits
1322 .set((self.saved_version.clone(), false));
1323 self.text.set_line_ending(line_ending);
1324 self.saved_mtime = mtime;
1325 cx.emit(BufferEvent::Reloaded);
1326 cx.notify();
1327 }
1328
1329 /// Updates the [`File`] backing this buffer. This should be called when
1330 /// the file has changed or has been deleted.
1331 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1332 let was_dirty = self.is_dirty();
1333 let mut file_changed = false;
1334
1335 if let Some(old_file) = self.file.as_ref() {
1336 if new_file.path() != old_file.path() {
1337 file_changed = true;
1338 }
1339
1340 let old_state = old_file.disk_state();
1341 let new_state = new_file.disk_state();
1342 if old_state != new_state {
1343 file_changed = true;
1344 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1345 cx.emit(BufferEvent::ReloadNeeded)
1346 }
1347 }
1348 } else {
1349 file_changed = true;
1350 };
1351
1352 self.file = Some(new_file);
1353 if file_changed {
1354 self.was_changed();
1355 self.non_text_state_update_count += 1;
1356 if was_dirty != self.is_dirty() {
1357 cx.emit(BufferEvent::DirtyChanged);
1358 }
1359 cx.emit(BufferEvent::FileHandleChanged);
1360 cx.notify();
1361 }
1362 }
1363
1364 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1365 Some(self.branch_state.as_ref()?.base_buffer.clone())
1366 }
1367
1368 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1369 pub fn language(&self) -> Option<&Arc<Language>> {
1370 self.language.as_ref()
1371 }
1372
1373 /// Returns the [`Language`] at the given location.
1374 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1375 let offset = position.to_offset(self);
1376 let mut is_first = true;
1377 let start_anchor = self.anchor_before(offset);
1378 let end_anchor = self.anchor_after(offset);
1379 self.syntax_map
1380 .lock()
1381 .layers_for_range(offset..offset, &self.text, false)
1382 .filter(|layer| {
1383 if is_first {
1384 is_first = false;
1385 return true;
1386 }
1387
1388 layer
1389 .included_sub_ranges
1390 .map(|sub_ranges| {
1391 sub_ranges.iter().any(|sub_range| {
1392 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1393 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1394 !is_before_start && !is_after_end
1395 })
1396 })
1397 .unwrap_or(true)
1398 })
1399 .last()
1400 .map(|info| info.language.clone())
1401 .or_else(|| self.language.clone())
1402 }
1403
1404 /// Returns each [`Language`] for the active syntax layers at the given location.
1405 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1406 let offset = position.to_offset(self);
1407 let mut languages: Vec<Arc<Language>> = self
1408 .syntax_map
1409 .lock()
1410 .layers_for_range(offset..offset, &self.text, false)
1411 .map(|info| info.language.clone())
1412 .collect();
1413
1414 if languages.is_empty()
1415 && let Some(buffer_language) = self.language()
1416 {
1417 languages.push(buffer_language.clone());
1418 }
1419
1420 languages
1421 }
1422
1423 /// An integer version number that accounts for all updates besides
1424 /// the buffer's text itself (which is versioned via a version vector).
1425 pub fn non_text_state_update_count(&self) -> usize {
1426 self.non_text_state_update_count
1427 }
1428
1429 /// Whether the buffer is being parsed in the background.
1430 #[cfg(any(test, feature = "test-support"))]
1431 pub fn is_parsing(&self) -> bool {
1432 self.reparse.is_some()
1433 }
1434
1435 /// Indicates whether the buffer contains any regions that may be
1436 /// written in a language that hasn't been loaded yet.
1437 pub fn contains_unknown_injections(&self) -> bool {
1438 self.syntax_map.lock().contains_unknown_injections()
1439 }
1440
1441 #[cfg(any(test, feature = "test-support"))]
1442 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1443 self.sync_parse_timeout = timeout;
1444 }
1445
1446 /// Called after an edit to synchronize the buffer's main parse tree with
1447 /// the buffer's new underlying state.
1448 ///
1449 /// Locks the syntax map and interpolates the edits since the last reparse
1450 /// into the foreground syntax tree.
1451 ///
1452 /// Then takes a stable snapshot of the syntax map before unlocking it.
1453 /// The snapshot with the interpolated edits is sent to a background thread,
1454 /// where we ask Tree-sitter to perform an incremental parse.
1455 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete, and proceed synchronously if it
    /// finishes in time.
    ///
    /// If the timeout elapses first, we return with the interpolated tree still
    /// in the foreground and spawn a second task that waits for the parse to
    /// complete. When the background parse finishes, it calls back into the
    /// main thread and installs the new parse state.
1464 ///
1465 /// If the buffer or grammar changed since the start of the background parse,
1466 /// initiate an additional reparse recursively. To avoid concurrent parses
1467 /// for the same buffer, we only initiate a new parse if we are not already
1468 /// parsing in the background.
1469 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1470 if self.reparse.is_some() {
1471 return;
1472 }
1473 let language = if let Some(language) = self.language.clone() {
1474 language
1475 } else {
1476 return;
1477 };
1478
1479 let text = self.text_snapshot();
1480 let parsed_version = self.version();
1481
1482 let mut syntax_map = self.syntax_map.lock();
1483 syntax_map.interpolate(&text);
1484 let language_registry = syntax_map.language_registry();
1485 let mut syntax_snapshot = syntax_map.snapshot();
1486 drop(syntax_map);
1487
1488 let parse_task = cx.background_spawn({
1489 let language = language.clone();
1490 let language_registry = language_registry.clone();
1491 async move {
1492 syntax_snapshot.reparse(&text, language_registry, language);
1493 syntax_snapshot
1494 }
1495 });
1496
1497 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1498 match cx
1499 .background_executor()
1500 .block_with_timeout(self.sync_parse_timeout, parse_task)
1501 {
1502 Ok(new_syntax_snapshot) => {
1503 self.did_finish_parsing(new_syntax_snapshot, cx);
1504 self.reparse = None;
1505 }
1506 Err(parse_task) => {
1507 self.reparse = Some(cx.spawn(async move |this, cx| {
1508 let new_syntax_map = parse_task.await;
1509 this.update(cx, move |this, cx| {
1510 let grammar_changed =
1511 this.language.as_ref().is_none_or(|current_language| {
1512 !Arc::ptr_eq(&language, current_language)
1513 });
1514 let language_registry_changed = new_syntax_map
1515 .contains_unknown_injections()
1516 && language_registry.is_some_and(|registry| {
1517 registry.version() != new_syntax_map.language_registry_version()
1518 });
1519 let parse_again = language_registry_changed
1520 || grammar_changed
1521 || this.version.changed_since(&parsed_version);
1522 this.did_finish_parsing(new_syntax_map, cx);
1523 this.reparse = None;
1524 if parse_again {
1525 this.reparse(cx);
1526 }
1527 })
1528 .ok();
1529 }));
1530 }
1531 }
1532 }
1533
1534 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1535 self.was_changed();
1536 self.non_text_state_update_count += 1;
1537 self.syntax_map.lock().did_parse(syntax_snapshot);
1538 self.request_autoindent(cx);
1539 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1540 cx.emit(BufferEvent::Reparsed);
1541 cx.notify();
1542 }
1543
1544 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1545 self.parse_status.1.clone()
1546 }
1547
1548 /// Assign to the buffer a set of diagnostics created by a given language server.
1549 pub fn update_diagnostics(
1550 &mut self,
1551 server_id: LanguageServerId,
1552 diagnostics: DiagnosticSet,
1553 cx: &mut Context<Self>,
1554 ) {
1555 let lamport_timestamp = self.text.lamport_clock.tick();
1556 let op = Operation::UpdateDiagnostics {
1557 server_id,
1558 diagnostics: diagnostics.iter().cloned().collect(),
1559 lamport_timestamp,
1560 };
1561
1562 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1563 self.send_operation(op, true, cx);
1564 }
1565
1566 pub fn buffer_diagnostics(
1567 &self,
1568 for_server: Option<LanguageServerId>,
1569 ) -> Vec<&DiagnosticEntry<Anchor>> {
1570 match for_server {
1571 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1572 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1573 Err(_) => Vec::new(),
1574 },
1575 None => self
1576 .diagnostics
1577 .iter()
1578 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1579 .collect(),
1580 }
1581 }
1582
1583 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1584 if let Some(indent_sizes) = self.compute_autoindents() {
1585 let indent_sizes = cx.background_spawn(indent_sizes);
1586 match cx
1587 .background_executor()
1588 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1589 {
1590 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1591 Err(indent_sizes) => {
1592 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1593 let indent_sizes = indent_sizes.await;
1594 this.update(cx, |this, cx| {
1595 this.apply_autoindents(indent_sizes, cx);
1596 })
1597 .ok();
1598 }));
1599 }
1600 }
1601 } else {
1602 self.autoindent_requests.clear();
1603 for tx in self.wait_for_autoindent_txs.drain(..) {
1604 tx.send(()).ok();
1605 }
1606 }
1607 }
1608
1609 fn compute_autoindents(
1610 &self,
1611 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1612 let max_rows_between_yields = 100;
1613 let snapshot = self.snapshot();
1614 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1615 return None;
1616 }
1617
1618 let autoindent_requests = self.autoindent_requests.clone();
1619 Some(async move {
1620 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1621 for request in autoindent_requests {
1622 // Resolve each edited range to its row in the current buffer and in the
1623 // buffer before this batch of edits.
1624 let mut row_ranges = Vec::new();
1625 let mut old_to_new_rows = BTreeMap::new();
1626 let mut language_indent_sizes_by_new_row = Vec::new();
1627 for entry in &request.entries {
1628 let position = entry.range.start;
1629 let new_row = position.to_point(&snapshot).row;
1630 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1631 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1632
1633 if !entry.first_line_is_new {
1634 let old_row = position.to_point(&request.before_edit).row;
1635 old_to_new_rows.insert(old_row, new_row);
1636 }
1637 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1638 }
1639
1640 // Build a map containing the suggested indentation for each of the edited lines
1641 // with respect to the state of the buffer before these edits. This map is keyed
1642 // by the rows for these lines in the current state of the buffer.
1643 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1644 let old_edited_ranges =
1645 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1646 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1647 let mut language_indent_size = IndentSize::default();
1648 for old_edited_range in old_edited_ranges {
1649 let suggestions = request
1650 .before_edit
1651 .suggest_autoindents(old_edited_range.clone())
1652 .into_iter()
1653 .flatten();
1654 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1655 if let Some(suggestion) = suggestion {
1656 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1657
1658 // Find the indent size based on the language for this row.
1659 while let Some((row, size)) = language_indent_sizes.peek() {
1660 if *row > new_row {
1661 break;
1662 }
1663 language_indent_size = *size;
1664 language_indent_sizes.next();
1665 }
1666
1667 let suggested_indent = old_to_new_rows
1668 .get(&suggestion.basis_row)
1669 .and_then(|from_row| {
1670 Some(old_suggestions.get(from_row).copied()?.0)
1671 })
1672 .unwrap_or_else(|| {
1673 request
1674 .before_edit
1675 .indent_size_for_line(suggestion.basis_row)
1676 })
1677 .with_delta(suggestion.delta, language_indent_size);
1678 old_suggestions
1679 .insert(new_row, (suggested_indent, suggestion.within_error));
1680 }
1681 }
1682 yield_now().await;
1683 }
1684
1685 // Compute new suggestions for each line, but only include them in the result
1686 // if they differ from the old suggestion for that line.
1687 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1688 let mut language_indent_size = IndentSize::default();
1689 for (row_range, original_indent_column) in row_ranges {
1690 let new_edited_row_range = if request.is_block_mode {
1691 row_range.start..row_range.start + 1
1692 } else {
1693 row_range.clone()
1694 };
1695
1696 let suggestions = snapshot
1697 .suggest_autoindents(new_edited_row_range.clone())
1698 .into_iter()
1699 .flatten();
1700 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1701 if let Some(suggestion) = suggestion {
1702 // Find the indent size based on the language for this row.
1703 while let Some((row, size)) = language_indent_sizes.peek() {
1704 if *row > new_row {
1705 break;
1706 }
1707 language_indent_size = *size;
1708 language_indent_sizes.next();
1709 }
1710
1711 let suggested_indent = indent_sizes
1712 .get(&suggestion.basis_row)
1713 .copied()
1714 .map(|e| e.0)
1715 .unwrap_or_else(|| {
1716 snapshot.indent_size_for_line(suggestion.basis_row)
1717 })
1718 .with_delta(suggestion.delta, language_indent_size);
1719
1720 if old_suggestions.get(&new_row).is_none_or(
1721 |(old_indentation, was_within_error)| {
1722 suggested_indent != *old_indentation
1723 && (!suggestion.within_error || *was_within_error)
1724 },
1725 ) {
1726 indent_sizes.insert(
1727 new_row,
1728 (suggested_indent, request.ignore_empty_lines),
1729 );
1730 }
1731 }
1732 }
1733
1734 if let (true, Some(original_indent_column)) =
1735 (request.is_block_mode, original_indent_column)
1736 {
1737 let new_indent =
1738 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1739 *indent
1740 } else {
1741 snapshot.indent_size_for_line(row_range.start)
1742 };
1743 let delta = new_indent.len as i64 - original_indent_column as i64;
1744 if delta != 0 {
1745 for row in row_range.skip(1) {
1746 indent_sizes.entry(row).or_insert_with(|| {
1747 let mut size = snapshot.indent_size_for_line(row);
1748 if size.kind == new_indent.kind {
1749 match delta.cmp(&0) {
1750 Ordering::Greater => size.len += delta as u32,
1751 Ordering::Less => {
1752 size.len = size.len.saturating_sub(-delta as u32)
1753 }
1754 Ordering::Equal => {}
1755 }
1756 }
1757 (size, request.ignore_empty_lines)
1758 });
1759 }
1760 }
1761 }
1762
1763 yield_now().await;
1764 }
1765 }
1766
1767 indent_sizes
1768 .into_iter()
1769 .filter_map(|(row, (indent, ignore_empty_lines))| {
1770 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1771 None
1772 } else {
1773 Some((row, indent))
1774 }
1775 })
1776 .collect()
1777 })
1778 }
1779
1780 fn apply_autoindents(
1781 &mut self,
1782 indent_sizes: BTreeMap<u32, IndentSize>,
1783 cx: &mut Context<Self>,
1784 ) {
1785 self.autoindent_requests.clear();
1786 for tx in self.wait_for_autoindent_txs.drain(..) {
1787 tx.send(()).ok();
1788 }
1789
1790 let edits: Vec<_> = indent_sizes
1791 .into_iter()
1792 .filter_map(|(row, indent_size)| {
1793 let current_size = indent_size_for_line(self, row);
1794 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1795 })
1796 .collect();
1797
1798 let preserve_preview = self.preserve_preview();
1799 self.edit(edits, None, cx);
1800 if preserve_preview {
1801 self.refresh_preview();
1802 }
1803 }
1804
1805 /// Create a minimal edit that will cause the given row to be indented
1806 /// with the given size. After applying this edit, the length of the line
1807 /// will always be at least `new_size.len`.
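    ///
    /// A rough usage sketch (illustrative only; assumes space-based indentation):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts the two missing columns at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // => Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// ```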
1808 pub fn edit_for_indent_size_adjustment(
1809 row: u32,
1810 current_size: IndentSize,
1811 new_size: IndentSize,
1812 ) -> Option<(Range<Point>, String)> {
1813 if new_size.kind == current_size.kind {
1814 match new_size.len.cmp(&current_size.len) {
1815 Ordering::Greater => {
1816 let point = Point::new(row, 0);
1817 Some((
1818 point..point,
1819 iter::repeat(new_size.char())
1820 .take((new_size.len - current_size.len) as usize)
1821 .collect::<String>(),
1822 ))
1823 }
1824
1825 Ordering::Less => Some((
1826 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1827 String::new(),
1828 )),
1829
1830 Ordering::Equal => None,
1831 }
1832 } else {
1833 Some((
1834 Point::new(row, 0)..Point::new(row, current_size.len),
1835 iter::repeat(new_size.char())
1836 .take(new_size.len as usize)
1837 .collect::<String>(),
1838 ))
1839 }
1840 }
1841
1842 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1843 /// and the given new text.
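    ///
    /// A rough usage sketch (illustrative only; assumes the caller can hold onto the
    /// buffer and a suitable `cx` until the task resolves):
    ///
    /// ```ignore
    /// // Kick off the diff computation on the background executor...
    /// let diff_task = buffer.diff(new_text, cx);
    /// // ...and once it resolves, apply it; `apply_diff` re-bases the hunks if the
    /// // buffer was edited in the meantime.
    /// let diff = diff_task.await;
    /// buffer.apply_diff(diff, cx);
    /// ```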
1844 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1845 let old_text = self.as_rope().clone();
1846 let base_version = self.version();
1847 cx.background_executor()
1848 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1849 let old_text = old_text.to_string();
1850 let line_ending = LineEnding::detect(&new_text);
1851 LineEnding::normalize(&mut new_text);
1852 let edits = text_diff(&old_text, &new_text);
1853 Diff {
1854 base_version,
1855 line_ending,
1856 edits,
1857 }
1858 })
1859 }
1860
1861 /// Spawns a background task that searches the buffer for any whitespace
1862 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1863 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1864 let old_text = self.as_rope().clone();
1865 let line_ending = self.line_ending();
1866 let base_version = self.version();
1867 cx.background_spawn(async move {
1868 let ranges = trailing_whitespace_ranges(&old_text);
1869 let empty = Arc::<str>::from("");
1870 Diff {
1871 base_version,
1872 line_ending,
1873 edits: ranges
1874 .into_iter()
1875 .map(|range| (range, empty.clone()))
1876 .collect(),
1877 }
1878 })
1879 }
1880
1881 /// Ensures that the buffer ends with a single newline character, and
1882 /// no other whitespace. Does nothing if the buffer is empty.
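    ///
    /// A rough before/after sketch (illustrative only):
    ///
    /// ```ignore
    /// // "fn main() {}\t \n\n"   =>   "fn main() {}\n"
    /// buffer.ensure_final_newline(cx);
    /// ```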
1883 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1884 let len = self.len();
1885 if len == 0 {
1886 return;
1887 }
1888 let mut offset = len;
1889 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1890 let non_whitespace_len = chunk
1891 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1892 .len();
1893 offset -= chunk.len();
1894 offset += non_whitespace_len;
1895 if non_whitespace_len != 0 {
1896 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1897 return;
1898 }
1899 break;
1900 }
1901 }
1902 self.edit([(offset..len, "\n")], None, cx);
1903 }
1904
1905 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1906 /// calculated, the diff is adjusted to account for those changes, and any parts
1907 /// of the diff that conflict with those changes are discarded.
1908 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1909 let snapshot = self.snapshot();
1910 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1911 let mut delta = 0;
1912 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1913 while let Some(edit_since) = edits_since.peek() {
1914 // If the edit occurs after a diff hunk, then it does not
1915 // affect that hunk.
1916 if edit_since.old.start > range.end {
1917 break;
1918 }
1919 // If the edit precedes the diff hunk, then adjust the hunk
1920 // to reflect the edit.
1921 else if edit_since.old.end < range.start {
1922 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1923 edits_since.next();
1924 }
1925 // If the edit intersects a diff hunk, then discard that hunk.
1926 else {
1927 return None;
1928 }
1929 }
1930
1931 let start = (range.start as i64 + delta) as usize;
1932 let end = (range.end as i64 + delta) as usize;
1933 Some((start..end, new_text))
1934 });
1935
1936 self.start_transaction();
1937 self.text.set_line_ending(diff.line_ending);
1938 self.edit(adjusted_edits, None, cx);
1939 self.end_transaction(cx)
1940 }
1941
1942 fn has_unsaved_edits(&self) -> bool {
1943 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1944
1945 if last_version == self.version {
1946 self.has_unsaved_edits
1947 .set((last_version, has_unsaved_edits));
1948 return has_unsaved_edits;
1949 }
1950
1951 let has_edits = self.has_edits_since(&self.saved_version);
1952 self.has_unsaved_edits
1953 .set((self.version.clone(), has_edits));
1954 has_edits
1955 }
1956
1957 /// Checks if the buffer has unsaved changes.
1958 pub fn is_dirty(&self) -> bool {
1959 if self.capability == Capability::ReadOnly {
1960 return false;
1961 }
1962 if self.has_conflict {
1963 return true;
1964 }
1965 match self.file.as_ref().map(|f| f.disk_state()) {
1966 Some(DiskState::New) | Some(DiskState::Deleted) => {
1967 !self.is_empty() && self.has_unsaved_edits()
1968 }
1969 _ => self.has_unsaved_edits(),
1970 }
1971 }
1972
1973 /// Checks if the buffer and its file have both changed since the buffer
1974 /// was last saved or reloaded.
1975 pub fn has_conflict(&self) -> bool {
1976 if self.has_conflict {
1977 return true;
1978 }
1979 let Some(file) = self.file.as_ref() else {
1980 return false;
1981 };
1982 match file.disk_state() {
1983 DiskState::New => false,
1984 DiskState::Present { mtime } => match self.saved_mtime {
1985 Some(saved_mtime) => {
1986 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1987 }
1988 None => true,
1989 },
1990 DiskState::Deleted => false,
1991 }
1992 }
1993
1994 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1995 pub fn subscribe(&mut self) -> Subscription {
1996 self.text.subscribe()
1997 }
1998
1999 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2000 ///
2001 /// This allows downstream code to check if the buffer's text has changed without
2002 /// waiting for an effect cycle, which would be required if using events.
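    ///
    /// A minimal sketch of the intended usage (illustrative only; assumes `Rc` and
    /// `Cell` from the standard library):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // Any subsequent edit flips the bit, so derived state can be refreshed lazily:
    /// if changed.get() {
    ///     changed.set(false);
    ///     // recompute whatever was cached from the buffer's text
    /// }
    /// ```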
2003 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2004 if let Err(ix) = self
2005 .change_bits
2006 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2007 {
2008 self.change_bits.insert(ix, bit);
2009 }
2010 }
2011
2012 fn was_changed(&mut self) {
2013 self.change_bits.retain(|change_bit| {
2014 change_bit.upgrade().is_some_and(|bit| {
2015 bit.replace(true);
2016 true
2017 })
2018 });
2019 }
2020
2021 /// Starts a transaction, if one is not already in-progress. When undoing or
2022 /// redoing edits, all of the edits performed within a transaction are undone
2023 /// or redone together.
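    ///
    /// A rough sketch of grouping several edits into one undo step (illustrative only):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
    /// buffer.end_transaction(cx); // both edits now undo and redo together
    /// ```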
2024 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2025 self.start_transaction_at(Instant::now())
2026 }
2027
2028 /// Starts a transaction, providing the current time. Subsequent transactions
2029 /// that occur within a short period of time will be grouped together. This
2030 /// is controlled by the buffer's undo grouping duration.
2031 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2032 self.transaction_depth += 1;
2033 if self.was_dirty_before_starting_transaction.is_none() {
2034 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2035 }
2036 self.text.start_transaction_at(now)
2037 }
2038
2039 /// Terminates the current transaction, if this is the outermost transaction.
2040 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2041 self.end_transaction_at(Instant::now(), cx)
2042 }
2043
2044 /// Terminates the current transaction, providing the current time. Subsequent transactions
2045 /// that occur within a short period of time will be grouped together. This
2046 /// is controlled by the buffer's undo grouping duration.
2047 pub fn end_transaction_at(
2048 &mut self,
2049 now: Instant,
2050 cx: &mut Context<Self>,
2051 ) -> Option<TransactionId> {
2052 assert!(self.transaction_depth > 0);
2053 self.transaction_depth -= 1;
2054 let was_dirty = if self.transaction_depth == 0 {
2055 self.was_dirty_before_starting_transaction.take().unwrap()
2056 } else {
2057 false
2058 };
2059 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2060 self.did_edit(&start_version, was_dirty, cx);
2061 Some(transaction_id)
2062 } else {
2063 None
2064 }
2065 }
2066
2067 /// Manually add a transaction to the buffer's undo history.
2068 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2069 self.text.push_transaction(transaction, now);
2070 }
2071
2072 /// Differs from `push_transaction` in that it does not clear the redo
2073 /// stack. Intended to be used to create a parent transaction to merge
2074 /// potential child transactions into.
2075 ///
2076 /// The caller is responsible for removing it from the undo history using
2077 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2078 /// are merged into this transaction, the caller is responsible for ensuring
2079 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2080 /// cleared is to create transactions with the usual `start_transaction` and
2081 /// `end_transaction` methods and to merge the resulting transactions into
2082 /// the transaction created by this method.
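    ///
    /// A rough sketch of that flow (illustrative only):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ...later, create a child transaction with the usual APIs...
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged, so remove the placeholder from the undo history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```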
2083 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2084 self.text.push_empty_transaction(now)
2085 }
2086
2087 /// Prevent the last transaction from being grouped with any subsequent transactions,
2088 /// even if they occur within the buffer's undo grouping duration.
2089 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2090 self.text.finalize_last_transaction()
2091 }
2092
2093 /// Manually group all changes since a given transaction.
2094 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2095 self.text.group_until_transaction(transaction_id);
2096 }
2097
2098 /// Manually remove a transaction from the buffer's undo history.
2099 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2100 self.text.forget_transaction(transaction_id)
2101 }
2102
2103 /// Retrieve a transaction from the buffer's undo history.
2104 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2105 self.text.get_transaction(transaction_id)
2106 }
2107
2108 /// Manually merge two transactions in the buffer's undo history.
2109 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2110 self.text.merge_transactions(transaction, destination);
2111 }
2112
2113 /// Waits for the buffer to receive operations with the given timestamps.
2114 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2115 &mut self,
2116 edit_ids: It,
2117 ) -> impl Future<Output = Result<()>> + use<It> {
2118 self.text.wait_for_edits(edit_ids)
2119 }
2120
2121 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2122 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2123 &mut self,
2124 anchors: It,
2125 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2126 self.text.wait_for_anchors(anchors)
2127 }
2128
2129 /// Waits for the buffer to receive operations up to the given version.
2130 pub fn wait_for_version(
2131 &mut self,
2132 version: clock::Global,
2133 ) -> impl Future<Output = Result<()>> + use<> {
2134 self.text.wait_for_version(version)
2135 }
2136
2137 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2138 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2139 pub fn give_up_waiting(&mut self) {
2140 self.text.give_up_waiting();
2141 }
2142
2143 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2144 let mut rx = None;
2145 if !self.autoindent_requests.is_empty() {
2146 let channel = oneshot::channel();
2147 self.wait_for_autoindent_txs.push(channel.0);
2148 rx = Some(channel.1);
2149 }
2150 rx
2151 }
2152
2153 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2154 pub fn set_active_selections(
2155 &mut self,
2156 selections: Arc<[Selection<Anchor>]>,
2157 line_mode: bool,
2158 cursor_shape: CursorShape,
2159 cx: &mut Context<Self>,
2160 ) {
2161 let lamport_timestamp = self.text.lamport_clock.tick();
2162 self.remote_selections.insert(
2163 self.text.replica_id(),
2164 SelectionSet {
2165 selections: selections.clone(),
2166 lamport_timestamp,
2167 line_mode,
2168 cursor_shape,
2169 },
2170 );
2171 self.send_operation(
2172 Operation::UpdateSelections {
2173 selections,
2174 line_mode,
2175 lamport_timestamp,
2176 cursor_shape,
2177 },
2178 true,
2179 cx,
2180 );
2181 self.non_text_state_update_count += 1;
2182 cx.notify();
2183 }
2184
2185 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2186 /// this replica.
2187 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2188 if self
2189 .remote_selections
2190 .get(&self.text.replica_id())
2191 .is_none_or(|set| !set.selections.is_empty())
2192 {
2193 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2194 }
2195 }
2196
2197 pub fn set_agent_selections(
2198 &mut self,
2199 selections: Arc<[Selection<Anchor>]>,
2200 line_mode: bool,
2201 cursor_shape: CursorShape,
2202 cx: &mut Context<Self>,
2203 ) {
2204 let lamport_timestamp = self.text.lamport_clock.tick();
2205 self.remote_selections.insert(
2206 AGENT_REPLICA_ID,
2207 SelectionSet {
2208 selections,
2209 lamport_timestamp,
2210 line_mode,
2211 cursor_shape,
2212 },
2213 );
2214 self.non_text_state_update_count += 1;
2215 cx.notify();
2216 }
2217
2218 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2219 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2220 }
2221
2222 /// Replaces the buffer's entire text.
2223 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2224 where
2225 T: Into<Arc<str>>,
2226 {
2227 self.autoindent_requests.clear();
2228 self.edit([(0..self.len(), text)], None, cx)
2229 }
2230
2231 /// Appends the given text to the end of the buffer.
2232 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2233 where
2234 T: Into<Arc<str>>,
2235 {
2236 self.edit([(self.len()..self.len(), text)], None, cx)
2237 }
2238
2239 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2240 /// delete, and a string of text to insert at that location.
2241 ///
2242 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2243 /// request for the edited ranges, which will be processed when the buffer finishes
2244 /// parsing.
2245 ///
2246 /// Parsing takes place at the end of a transaction, and may compute synchronously
2247 /// or asynchronously, depending on the changes.
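    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// // Replace the first five bytes and append a line, auto-indenting each edited line.
    /// buffer.edit(
    ///     [(0..5, "hello"), (buffer.len()..buffer.len(), "\nworld")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```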
2248 pub fn edit<I, S, T>(
2249 &mut self,
2250 edits_iter: I,
2251 autoindent_mode: Option<AutoindentMode>,
2252 cx: &mut Context<Self>,
2253 ) -> Option<clock::Lamport>
2254 where
2255 I: IntoIterator<Item = (Range<S>, T)>,
2256 S: ToOffset,
2257 T: Into<Arc<str>>,
2258 {
2259 // Skip invalid edits and coalesce contiguous ones.
2260 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2261
2262 for (range, new_text) in edits_iter {
2263 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2264
2265 if range.start > range.end {
2266 mem::swap(&mut range.start, &mut range.end);
2267 }
2268 let new_text = new_text.into();
2269 if !new_text.is_empty() || !range.is_empty() {
2270 if let Some((prev_range, prev_text)) = edits.last_mut()
2271 && prev_range.end >= range.start
2272 {
2273 prev_range.end = cmp::max(prev_range.end, range.end);
2274 *prev_text = format!("{prev_text}{new_text}").into();
2275 } else {
2276 edits.push((range, new_text));
2277 }
2278 }
2279 }
2280 if edits.is_empty() {
2281 return None;
2282 }
2283
2284 self.start_transaction();
2285 self.pending_autoindent.take();
2286 let autoindent_request = autoindent_mode
2287 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2288
2289 let edit_operation = self.text.edit(edits.iter().cloned());
2290 let edit_id = edit_operation.timestamp();
2291
2292 if let Some((before_edit, mode)) = autoindent_request {
2293 let mut delta = 0isize;
2294 let mut previous_setting = None;
2295 let entries: Vec<_> = edits
2296 .into_iter()
2297 .enumerate()
2298 .zip(&edit_operation.as_edit().unwrap().new_text)
2299 .filter(|((_, (range, _)), _)| {
2300 let language = before_edit.language_at(range.start);
2301 let language_id = language.map(|l| l.id());
2302 if let Some((cached_language_id, auto_indent)) = previous_setting
2303 && cached_language_id == language_id
2304 {
2305 auto_indent
2306 } else {
2307 // The auto-indent setting is not present in editorconfigs, hence
2308 // we can avoid passing the file here.
2309 let auto_indent =
2310 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2311 previous_setting = Some((language_id, auto_indent));
2312 auto_indent
2313 }
2314 })
2315 .map(|((ix, (range, _)), new_text)| {
2316 let new_text_length = new_text.len();
2317 let old_start = range.start.to_point(&before_edit);
2318 let new_start = (delta + range.start as isize) as usize;
2319 let range_len = range.end - range.start;
2320 delta += new_text_length as isize - range_len as isize;
2321
2322 // Decide what range of the insertion to auto-indent, and whether
2323 // the first line of the insertion should be considered a newly-inserted line
2324 // or an edit to an existing line.
2325 let mut range_of_insertion_to_indent = 0..new_text_length;
2326 let mut first_line_is_new = true;
2327
2328 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2329 let old_line_end = before_edit.line_len(old_start.row);
2330
2331 if old_start.column > old_line_start {
2332 first_line_is_new = false;
2333 }
2334
2335 if !new_text.contains('\n')
2336 && (old_start.column + (range_len as u32) < old_line_end
2337 || old_line_end == old_line_start)
2338 {
2339 first_line_is_new = false;
2340 }
2341
2342 // When inserting text starting with a newline, avoid auto-indenting the
2343 // previous line.
2344 if new_text.starts_with('\n') {
2345 range_of_insertion_to_indent.start += 1;
2346 first_line_is_new = true;
2347 }
2348
2349 let mut original_indent_column = None;
2350 if let AutoindentMode::Block {
2351 original_indent_columns,
2352 } = &mode
2353 {
2354 original_indent_column = Some(if new_text.starts_with('\n') {
2355 indent_size_for_text(
2356 new_text[range_of_insertion_to_indent.clone()].chars(),
2357 )
2358 .len
2359 } else {
2360 original_indent_columns
2361 .get(ix)
2362 .copied()
2363 .flatten()
2364 .unwrap_or_else(|| {
2365 indent_size_for_text(
2366 new_text[range_of_insertion_to_indent.clone()].chars(),
2367 )
2368 .len
2369 })
2370 });
2371
2372 // Avoid auto-indenting the line after the edit.
2373 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2374 range_of_insertion_to_indent.end -= 1;
2375 }
2376 }
2377
2378 AutoindentRequestEntry {
2379 first_line_is_new,
2380 original_indent_column,
2381 indent_size: before_edit.language_indent_size_at(range.start, cx),
2382 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2383 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2384 }
2385 })
2386 .collect();
2387
2388 if !entries.is_empty() {
2389 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2390 before_edit,
2391 entries,
2392 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2393 ignore_empty_lines: false,
2394 }));
2395 }
2396 }
2397
2398 self.end_transaction(cx);
2399 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2400 Some(edit_id)
2401 }
2402
2403 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2404 self.was_changed();
2405
2406 if self.edits_since::<usize>(old_version).next().is_none() {
2407 return;
2408 }
2409
2410 self.reparse(cx);
2411 cx.emit(BufferEvent::Edited);
2412 if was_dirty != self.is_dirty() {
2413 cx.emit(BufferEvent::DirtyChanged);
2414 }
2415 cx.notify();
2416 }
2417
2418 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2419 where
2420 I: IntoIterator<Item = Range<T>>,
2421 T: ToOffset + Copy,
2422 {
2423 let before_edit = self.snapshot();
2424 let entries = ranges
2425 .into_iter()
2426 .map(|range| AutoindentRequestEntry {
2427 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2428 first_line_is_new: true,
2429 indent_size: before_edit.language_indent_size_at(range.start, cx),
2430 original_indent_column: None,
2431 })
2432 .collect();
2433 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2434 before_edit,
2435 entries,
2436 is_block_mode: false,
2437 ignore_empty_lines: true,
2438 }));
2439 self.request_autoindent(cx);
2440 }
2441
2442 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2443 // You can also request the insertion of empty lines above and below the line starting at the returned point.
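    // A rough usage sketch (illustrative only):
    //
    //     // Open a blank line near the cursor, padded with blank lines above and below:
    //     let start_of_new_line = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);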
2444 pub fn insert_empty_line(
2445 &mut self,
2446 position: impl ToPoint,
2447 space_above: bool,
2448 space_below: bool,
2449 cx: &mut Context<Self>,
2450 ) -> Point {
2451 let mut position = position.to_point(self);
2452
2453 self.start_transaction();
2454
2455 self.edit(
2456 [(position..position, "\n")],
2457 Some(AutoindentMode::EachLine),
2458 cx,
2459 );
2460
2461 if position.column > 0 {
2462 position += Point::new(1, 0);
2463 }
2464
2465 if !self.is_line_blank(position.row) {
2466 self.edit(
2467 [(position..position, "\n")],
2468 Some(AutoindentMode::EachLine),
2469 cx,
2470 );
2471 }
2472
2473 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2474 self.edit(
2475 [(position..position, "\n")],
2476 Some(AutoindentMode::EachLine),
2477 cx,
2478 );
2479 position.row += 1;
2480 }
2481
2482 if space_below
2483 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2484 {
2485 self.edit(
2486 [(position..position, "\n")],
2487 Some(AutoindentMode::EachLine),
2488 cx,
2489 );
2490 }
2491
2492 self.end_transaction(cx);
2493
2494 position
2495 }
2496
2497 /// Applies the given remote operations to the buffer.
2498 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2499 self.pending_autoindent.take();
2500 let was_dirty = self.is_dirty();
2501 let old_version = self.version.clone();
2502 let mut deferred_ops = Vec::new();
2503 let buffer_ops = ops
2504 .into_iter()
2505 .filter_map(|op| match op {
2506 Operation::Buffer(op) => Some(op),
2507 _ => {
2508 if self.can_apply_op(&op) {
2509 self.apply_op(op, cx);
2510 } else {
2511 deferred_ops.push(op);
2512 }
2513 None
2514 }
2515 })
2516 .collect::<Vec<_>>();
2517 for operation in buffer_ops.iter() {
2518 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2519 }
2520 self.text.apply_ops(buffer_ops);
2521 self.deferred_ops.insert(deferred_ops);
2522 self.flush_deferred_ops(cx);
2523 self.did_edit(&old_version, was_dirty, cx);
2524 // Notify independently of whether the buffer was edited as the operations could include a
2525 // selection update.
2526 cx.notify();
2527 }
2528
2529 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2530 let mut deferred_ops = Vec::new();
2531 for op in self.deferred_ops.drain().iter().cloned() {
2532 if self.can_apply_op(&op) {
2533 self.apply_op(op, cx);
2534 } else {
2535 deferred_ops.push(op);
2536 }
2537 }
2538 self.deferred_ops.insert(deferred_ops);
2539 }
2540
2541 pub fn has_deferred_ops(&self) -> bool {
2542 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2543 }
2544
2545 fn can_apply_op(&self, operation: &Operation) -> bool {
2546 match operation {
2547 Operation::Buffer(_) => {
2548 unreachable!("buffer operations should never be applied at this layer")
2549 }
2550 Operation::UpdateDiagnostics {
2551 diagnostics: diagnostic_set,
2552 ..
2553 } => diagnostic_set.iter().all(|diagnostic| {
2554 self.text.can_resolve(&diagnostic.range.start)
2555 && self.text.can_resolve(&diagnostic.range.end)
2556 }),
2557 Operation::UpdateSelections { selections, .. } => selections
2558 .iter()
2559 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2560 Operation::UpdateCompletionTriggers { .. } => true,
2561 }
2562 }
2563
2564 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2565 match operation {
2566 Operation::Buffer(_) => {
2567 unreachable!("buffer operations should never be applied at this layer")
2568 }
2569 Operation::UpdateDiagnostics {
2570 server_id,
2571 diagnostics: diagnostic_set,
2572 lamport_timestamp,
2573 } => {
2574 let snapshot = self.snapshot();
2575 self.apply_diagnostic_update(
2576 server_id,
2577 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2578 lamport_timestamp,
2579 cx,
2580 );
2581 }
2582 Operation::UpdateSelections {
2583 selections,
2584 lamport_timestamp,
2585 line_mode,
2586 cursor_shape,
2587 } => {
2588 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2589 && set.lamport_timestamp > lamport_timestamp
2590 {
2591 return;
2592 }
2593
2594 self.remote_selections.insert(
2595 lamport_timestamp.replica_id,
2596 SelectionSet {
2597 selections,
2598 lamport_timestamp,
2599 line_mode,
2600 cursor_shape,
2601 },
2602 );
2603 self.text.lamport_clock.observe(lamport_timestamp);
2604 self.non_text_state_update_count += 1;
2605 }
2606 Operation::UpdateCompletionTriggers {
2607 triggers,
2608 lamport_timestamp,
2609 server_id,
2610 } => {
2611 if triggers.is_empty() {
2612 self.completion_triggers_per_language_server
2613 .remove(&server_id);
2614 self.completion_triggers = self
2615 .completion_triggers_per_language_server
2616 .values()
2617 .flat_map(|triggers| triggers.iter().cloned())
2618 .collect();
2619 } else {
2620 self.completion_triggers_per_language_server
2621 .insert(server_id, triggers.iter().cloned().collect());
2622 self.completion_triggers.extend(triggers);
2623 }
2624 self.text.lamport_clock.observe(lamport_timestamp);
2625 }
2626 }
2627 }
2628
2629 fn apply_diagnostic_update(
2630 &mut self,
2631 server_id: LanguageServerId,
2632 diagnostics: DiagnosticSet,
2633 lamport_timestamp: clock::Lamport,
2634 cx: &mut Context<Self>,
2635 ) {
2636 if lamport_timestamp > self.diagnostics_timestamp {
2637 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2638 if diagnostics.is_empty() {
2639 if let Ok(ix) = ix {
2640 self.diagnostics.remove(ix);
2641 }
2642 } else {
2643 match ix {
2644 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2645 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2646 };
2647 }
2648 self.diagnostics_timestamp = lamport_timestamp;
2649 self.non_text_state_update_count += 1;
2650 self.text.lamport_clock.observe(lamport_timestamp);
2651 cx.notify();
2652 cx.emit(BufferEvent::DiagnosticsUpdated);
2653 }
2654 }
2655
2656 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2657 self.was_changed();
2658 cx.emit(BufferEvent::Operation {
2659 operation,
2660 is_local,
2661 });
2662 }
2663
2664 /// Removes the selections for a given peer.
2665 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2666 self.remote_selections.remove(&replica_id);
2667 cx.notify();
2668 }
2669
2670 /// Undoes the most recent transaction.
2671 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2672 let was_dirty = self.is_dirty();
2673 let old_version = self.version.clone();
2674
2675 if let Some((transaction_id, operation)) = self.text.undo() {
2676 self.send_operation(Operation::Buffer(operation), true, cx);
2677 self.did_edit(&old_version, was_dirty, cx);
2678 Some(transaction_id)
2679 } else {
2680 None
2681 }
2682 }
2683
2684 /// Manually undoes a specific transaction in the buffer's undo history.
2685 pub fn undo_transaction(
2686 &mut self,
2687 transaction_id: TransactionId,
2688 cx: &mut Context<Self>,
2689 ) -> bool {
2690 let was_dirty = self.is_dirty();
2691 let old_version = self.version.clone();
2692 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2693 self.send_operation(Operation::Buffer(operation), true, cx);
2694 self.did_edit(&old_version, was_dirty, cx);
2695 true
2696 } else {
2697 false
2698 }
2699 }
2700
2701 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2702 pub fn undo_to_transaction(
2703 &mut self,
2704 transaction_id: TransactionId,
2705 cx: &mut Context<Self>,
2706 ) -> bool {
2707 let was_dirty = self.is_dirty();
2708 let old_version = self.version.clone();
2709
2710 let operations = self.text.undo_to_transaction(transaction_id);
2711 let undone = !operations.is_empty();
2712 for operation in operations {
2713 self.send_operation(Operation::Buffer(operation), true, cx);
2714 }
2715 if undone {
2716 self.did_edit(&old_version, was_dirty, cx)
2717 }
2718 undone
2719 }
2720
2721 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2722 let was_dirty = self.is_dirty();
2723 let operation = self.text.undo_operations(counts);
2724 let old_version = self.version.clone();
2725 self.send_operation(Operation::Buffer(operation), true, cx);
2726 self.did_edit(&old_version, was_dirty, cx);
2727 }
2728
2729 /// Redoes the most recent transaction in the buffer's redo history.
2730 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2731 let was_dirty = self.is_dirty();
2732 let old_version = self.version.clone();
2733
2734 if let Some((transaction_id, operation)) = self.text.redo() {
2735 self.send_operation(Operation::Buffer(operation), true, cx);
2736 self.did_edit(&old_version, was_dirty, cx);
2737 Some(transaction_id)
2738 } else {
2739 None
2740 }
2741 }
2742
2743 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2744 pub fn redo_to_transaction(
2745 &mut self,
2746 transaction_id: TransactionId,
2747 cx: &mut Context<Self>,
2748 ) -> bool {
2749 let was_dirty = self.is_dirty();
2750 let old_version = self.version.clone();
2751
2752 let operations = self.text.redo_to_transaction(transaction_id);
2753 let redone = !operations.is_empty();
2754 for operation in operations {
2755 self.send_operation(Operation::Buffer(operation), true, cx);
2756 }
2757 if redone {
2758 self.did_edit(&old_version, was_dirty, cx)
2759 }
2760 redone
2761 }
2762
2763 /// Overrides the current completion triggers with the user-provided completion triggers.
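    ///
    /// A rough usage sketch (illustrative only; `server_id` is assumed to be a known
    /// `LanguageServerId`):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```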
2764 pub fn set_completion_triggers(
2765 &mut self,
2766 server_id: LanguageServerId,
2767 triggers: BTreeSet<String>,
2768 cx: &mut Context<Self>,
2769 ) {
2770 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2771 if triggers.is_empty() {
2772 self.completion_triggers_per_language_server
2773 .remove(&server_id);
2774 self.completion_triggers = self
2775 .completion_triggers_per_language_server
2776 .values()
2777 .flat_map(|triggers| triggers.iter().cloned())
2778 .collect();
2779 } else {
2780 self.completion_triggers_per_language_server
2781 .insert(server_id, triggers.clone());
2782 self.completion_triggers.extend(triggers.iter().cloned());
2783 }
2784 self.send_operation(
2785 Operation::UpdateCompletionTriggers {
2786 triggers: triggers.into_iter().collect(),
2787 lamport_timestamp: self.completion_triggers_timestamp,
2788 server_id,
2789 },
2790 true,
2791 cx,
2792 );
2793 cx.notify();
2794 }
2795
2796 /// Returns a list of strings which trigger a completion menu for this language.
2797 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2798 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2799 &self.completion_triggers
2800 }
2801
2802 /// Call this directly after performing edits to prevent the preview tab
2803 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2804 /// to return false until there are additional edits.
2805 pub fn refresh_preview(&mut self) {
2806 self.preview_version = self.version.clone();
2807 }
2808
2809 /// Whether we should preserve the preview status of a tab containing this buffer.
2810 pub fn preserve_preview(&self) -> bool {
2811 !self.has_edits_since(&self.preview_version)
2812 }
2813}
2814
2815#[doc(hidden)]
2816#[cfg(any(test, feature = "test-support"))]
2817impl Buffer {
2818 pub fn edit_via_marked_text(
2819 &mut self,
2820 marked_string: &str,
2821 autoindent_mode: Option<AutoindentMode>,
2822 cx: &mut Context<Self>,
2823 ) {
2824 let edits = self.edits_for_marked_text(marked_string);
2825 self.edit(edits, autoindent_mode, cx);
2826 }
2827
2828 pub fn set_group_interval(&mut self, group_interval: Duration) {
2829 self.text.set_group_interval(group_interval);
2830 }
2831
2832 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2833 where
2834 T: rand::Rng,
2835 {
2836 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2837 let mut last_end = None;
2838 for _ in 0..old_range_count {
2839 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2840 break;
2841 }
2842
2843 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2844 let mut range = self.random_byte_range(new_start, rng);
2845 if rng.gen_bool(0.2) {
2846 mem::swap(&mut range.start, &mut range.end);
2847 }
2848 last_end = Some(range.end);
2849
2850 let new_text_len = rng.gen_range(0..10);
2851 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2852 new_text = new_text.to_uppercase();
2853
2854 edits.push((range, new_text));
2855 }
2856 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2857 self.edit(edits, None, cx);
2858 }
2859
2860 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2861 let was_dirty = self.is_dirty();
2862 let old_version = self.version.clone();
2863
2864 let ops = self.text.randomly_undo_redo(rng);
2865 if !ops.is_empty() {
2866 for op in ops {
2867 self.send_operation(Operation::Buffer(op), true, cx);
2868 self.did_edit(&old_version, was_dirty, cx);
2869 }
2870 }
2871 }
2872}
2873
2874impl EventEmitter<BufferEvent> for Buffer {}
2875
2876impl Deref for Buffer {
2877 type Target = TextBuffer;
2878
2879 fn deref(&self) -> &Self::Target {
2880 &self.text
2881 }
2882}
2883
2884impl BufferSnapshot {
2885 /// Returns [`IndentSize`] for a given line that respects user settings and
2886 /// language preferences.
2887 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2888 indent_size_for_line(self, row)
2889 }
2890
2891 /// Returns [`IndentSize`] for a given position that respects user settings
2892 /// and language preferences.
2893 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2894 let settings = language_settings(
2895 self.language_at(position).map(|l| l.name()),
2896 self.file(),
2897 cx,
2898 );
2899 if settings.hard_tabs {
2900 IndentSize::tab()
2901 } else {
2902 IndentSize::spaces(settings.tab_size.get())
2903 }
2904 }
2905
2906 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2907 /// is passed in as `single_indent_size`.
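    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// // Suggest indents for rows 2..6, using four spaces as a single level of indentation.
    /// let suggestions = snapshot.suggested_indents(2u32..6, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```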
2908 pub fn suggested_indents(
2909 &self,
2910 rows: impl Iterator<Item = u32>,
2911 single_indent_size: IndentSize,
2912 ) -> BTreeMap<u32, IndentSize> {
2913 let mut result = BTreeMap::new();
2914
2915 for row_range in contiguous_ranges(rows, 10) {
2916 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2917 Some(suggestions) => suggestions,
2918 _ => break,
2919 };
2920
2921 for (row, suggestion) in row_range.zip(suggestions) {
2922 let indent_size = if let Some(suggestion) = suggestion {
2923 result
2924 .get(&suggestion.basis_row)
2925 .copied()
2926 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2927 .with_delta(suggestion.delta, single_indent_size)
2928 } else {
2929 self.indent_size_for_line(row)
2930 };
2931
2932 result.insert(row, indent_size);
2933 }
2934 }
2935
2936 result
2937 }
2938
2939 fn suggest_autoindents(
2940 &self,
2941 row_range: Range<u32>,
2942 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2943 let config = &self.language.as_ref()?.config;
2944 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2945
2946 #[derive(Debug, Clone)]
2947 struct StartPosition {
2948 start: Point,
2949 suffix: SharedString,
2950 }
2951
2952 // Find the suggested indentation ranges based on the syntax tree.
2953 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2954 let end = Point::new(row_range.end, 0);
2955 let range = (start..end).to_offset(&self.text);
2956 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2957 Some(&grammar.indents_config.as_ref()?.query)
2958 });
2959 let indent_configs = matches
2960 .grammars()
2961 .iter()
2962 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2963 .collect::<Vec<_>>();
2964
2965 let mut indent_ranges = Vec::<Range<Point>>::new();
2966 let mut start_positions = Vec::<StartPosition>::new();
2967 let mut outdent_positions = Vec::<Point>::new();
2968 while let Some(mat) = matches.peek() {
2969 let mut start: Option<Point> = None;
2970 let mut end: Option<Point> = None;
2971
2972 let config = indent_configs[mat.grammar_index];
2973 for capture in mat.captures {
2974 if capture.index == config.indent_capture_ix {
2975 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2976 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2977 } else if Some(capture.index) == config.start_capture_ix {
2978 start = Some(Point::from_ts_point(capture.node.end_position()));
2979 } else if Some(capture.index) == config.end_capture_ix {
2980 end = Some(Point::from_ts_point(capture.node.start_position()));
2981 } else if Some(capture.index) == config.outdent_capture_ix {
2982 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2983 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
2984 start_positions.push(StartPosition {
2985 start: Point::from_ts_point(capture.node.start_position()),
2986 suffix: suffix.clone(),
2987 });
2988 }
2989 }
2990
2991 matches.advance();
2992 if let Some((start, end)) = start.zip(end) {
2993 if start.row == end.row {
2994 continue;
2995 }
2996 let range = start..end;
2997 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2998 Err(ix) => indent_ranges.insert(ix, range),
2999 Ok(ix) => {
3000 let prev_range = &mut indent_ranges[ix];
3001 prev_range.end = prev_range.end.max(range.end);
3002 }
3003 }
3004 }
3005 }
3006
3007 let mut error_ranges = Vec::<Range<Point>>::new();
3008 let mut matches = self
3009 .syntax
3010 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3011 while let Some(mat) = matches.peek() {
3012 let node = mat.captures[0].node;
3013 let start = Point::from_ts_point(node.start_position());
3014 let end = Point::from_ts_point(node.end_position());
3015 let range = start..end;
3016 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3017 Ok(ix) | Err(ix) => ix,
3018 };
3019 let mut end_ix = ix;
3020 while let Some(existing_range) = error_ranges.get(end_ix) {
3021 if existing_range.end < end {
3022 end_ix += 1;
3023 } else {
3024 break;
3025 }
3026 }
3027 error_ranges.splice(ix..end_ix, [range]);
3028 matches.advance();
3029 }
3030
3031 outdent_positions.sort();
3032 for outdent_position in outdent_positions {
3033 // find the innermost indent range containing this outdent_position
3034 // set its end to the outdent position
3035 if let Some(range_to_truncate) = indent_ranges
3036 .iter_mut()
3037 .filter(|indent_range| indent_range.contains(&outdent_position))
3038 .next_back()
3039 {
3040 range_to_truncate.end = outdent_position;
3041 }
3042 }
3043
3044 start_positions.sort_by_key(|b| b.start);
3045
3046 // Find the suggested indentation increases and decreases based on regexes.
3047 let mut regex_outdent_map = HashMap::default();
3048 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3049 let mut start_positions_iter = start_positions.iter().peekable();
3050
3051 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3052 self.for_each_line(
3053 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3054 ..Point::new(row_range.end, 0),
3055 |row, line| {
3056 if config
3057 .decrease_indent_pattern
3058 .as_ref()
3059 .is_some_and(|regex| regex.is_match(line))
3060 {
3061 indent_change_rows.push((row, Ordering::Less));
3062 }
3063 if config
3064 .increase_indent_pattern
3065 .as_ref()
3066 .is_some_and(|regex| regex.is_match(line))
3067 {
3068 indent_change_rows.push((row + 1, Ordering::Greater));
3069 }
3070 while let Some(pos) = start_positions_iter.peek() {
3071 if pos.start.row < row {
3072 let pos = start_positions_iter.next().unwrap();
3073 last_seen_suffix
3074 .entry(pos.suffix.to_string())
3075 .or_default()
3076 .push(pos.start);
3077 } else {
3078 break;
3079 }
3080 }
3081 for rule in &config.decrease_indent_patterns {
3082 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3083 let row_start_column = self.indent_size_for_line(row).len;
3084 let basis_row = rule
3085 .valid_after
3086 .iter()
3087 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3088 .flatten()
3089 .filter(|start_point| start_point.column <= row_start_column)
3090 .max_by_key(|start_point| start_point.row);
3091 if let Some(outdent_to_row) = basis_row {
3092 regex_outdent_map.insert(row, outdent_to_row.row);
3093 }
3094 break;
3095 }
3096 }
3097 },
3098 );
3099
3100 let mut indent_changes = indent_change_rows.into_iter().peekable();
3101 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3102 prev_non_blank_row.unwrap_or(0)
3103 } else {
3104 row_range.start.saturating_sub(1)
3105 };
3106
3107 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3108 Some(row_range.map(move |row| {
3109 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3110
3111 let mut indent_from_prev_row = false;
3112 let mut outdent_from_prev_row = false;
3113 let mut outdent_to_row = u32::MAX;
3114 let mut from_regex = false;
3115
3116 while let Some((indent_row, delta)) = indent_changes.peek() {
3117 match indent_row.cmp(&row) {
3118 Ordering::Equal => match delta {
3119 Ordering::Less => {
3120 from_regex = true;
3121 outdent_from_prev_row = true
3122 }
3123 Ordering::Greater => {
3124 indent_from_prev_row = true;
3125 from_regex = true
3126 }
3127 _ => {}
3128 },
3129
3130 Ordering::Greater => break,
3131 Ordering::Less => {}
3132 }
3133
3134 indent_changes.next();
3135 }
3136
3137 for range in &indent_ranges {
3138 if range.start.row >= row {
3139 break;
3140 }
3141 if range.start.row == prev_row && range.end > row_start {
3142 indent_from_prev_row = true;
3143 }
3144 if range.end > prev_row_start && range.end <= row_start {
3145 outdent_to_row = outdent_to_row.min(range.start.row);
3146 }
3147 }
3148
3149 if let Some(basis_row) = regex_outdent_map.get(&row) {
3150 indent_from_prev_row = false;
3151 outdent_to_row = *basis_row;
3152 from_regex = true;
3153 }
3154
3155 let within_error = error_ranges
3156 .iter()
3157 .any(|e| e.start.row < row && e.end > row_start);
3158
3159 let suggestion = if outdent_to_row == prev_row
3160 || (outdent_from_prev_row && indent_from_prev_row)
3161 {
3162 Some(IndentSuggestion {
3163 basis_row: prev_row,
3164 delta: Ordering::Equal,
3165 within_error: within_error && !from_regex,
3166 })
3167 } else if indent_from_prev_row {
3168 Some(IndentSuggestion {
3169 basis_row: prev_row,
3170 delta: Ordering::Greater,
3171 within_error: within_error && !from_regex,
3172 })
3173 } else if outdent_to_row < prev_row {
3174 Some(IndentSuggestion {
3175 basis_row: outdent_to_row,
3176 delta: Ordering::Equal,
3177 within_error: within_error && !from_regex,
3178 })
3179 } else if outdent_from_prev_row {
3180 Some(IndentSuggestion {
3181 basis_row: prev_row,
3182 delta: Ordering::Less,
3183 within_error: within_error && !from_regex,
3184 })
3185 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3186 {
3187 Some(IndentSuggestion {
3188 basis_row: prev_row,
3189 delta: Ordering::Equal,
3190 within_error: within_error && !from_regex,
3191 })
3192 } else {
3193 None
3194 };
3195
3196 prev_row = row;
3197 prev_row_start = row_start;
3198 suggestion
3199 }))
3200 }
3201
3202 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3203 while row > 0 {
3204 row -= 1;
3205 if !self.is_line_blank(row) {
3206 return Some(row);
3207 }
3208 }
3209 None
3210 }
3211
3212 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3213 let captures = self.syntax.captures(range, &self.text, |grammar| {
3214 grammar.highlights_query.as_ref()
3215 });
3216 let highlight_maps = captures
3217 .grammars()
3218 .iter()
3219 .map(|grammar| grammar.highlight_map())
3220 .collect();
3221 (captures, highlight_maps)
3222 }
3223
3224 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3225 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3226 /// returned in chunks where each chunk has a single syntax highlighting style and
3227 /// diagnostic status.
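    ///
    /// A rough usage sketch (illustrative only; the `text` field name on the yielded
    /// chunks is an assumption):
    ///
    /// ```ignore
    /// let mut plain_text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), false) {
    ///     plain_text.push_str(chunk.text);
    /// }
    /// ```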
3228 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3229 let range = range.start.to_offset(self)..range.end.to_offset(self);
3230
3231 let mut syntax = None;
3232 if language_aware {
3233 syntax = Some(self.get_highlights(range.clone()));
3234 }
3235 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3236 let diagnostics = language_aware;
3237 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3238 }
3239
3240 pub fn highlighted_text_for_range<T: ToOffset>(
3241 &self,
3242 range: Range<T>,
3243 override_style: Option<HighlightStyle>,
3244 syntax_theme: &SyntaxTheme,
3245 ) -> HighlightedText {
3246 HighlightedText::from_buffer_range(
3247 range,
3248 &self.text,
3249 &self.syntax,
3250 override_style,
3251 syntax_theme,
3252 )
3253 }
3254
3255 /// Invokes the given callback for each line of text in the given range of the buffer.
3256 /// Uses a callback to avoid allocating a new string for each line.
3257 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3258 let mut line = String::new();
3259 let mut row = range.start.row;
3260 for chunk in self
3261 .as_rope()
3262 .chunks_in_range(range.to_offset(self))
3263 .chain(["\n"])
3264 {
3265 for (newline_ix, text) in chunk.split('\n').enumerate() {
3266 if newline_ix > 0 {
3267 callback(row, &line);
3268 row += 1;
3269 line.clear();
3270 }
3271 line.push_str(text);
3272 }
3273 }
3274 }
3275
3276 /// Iterates over every [`SyntaxLayer`] in the buffer.
3277 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3278 self.syntax
3279 .layers_for_range(0..self.len(), &self.text, true)
3280 }
3281
3282 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3283 let offset = position.to_offset(self);
3284 self.syntax
3285 .layers_for_range(offset..offset, &self.text, false)
3286 .filter(|l| l.node().end_byte() > offset)
3287 .last()
3288 }
3289
3290 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3291 &self,
3292 range: Range<D>,
3293 ) -> Option<SyntaxLayer<'_>> {
3294 let range = range.to_offset(self);
3295 self.syntax
3296 .layers_for_range(range, &self.text, false)
3297 .max_by(|a, b| {
3298 if a.depth != b.depth {
3299 a.depth.cmp(&b.depth)
3300 } else if a.offset.0 != b.offset.0 {
3301 a.offset.0.cmp(&b.offset.0)
3302 } else {
3303 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3304 }
3305 })
3306 }
3307
3308 /// Returns the main [`Language`].
3309 pub fn language(&self) -> Option<&Arc<Language>> {
3310 self.language.as_ref()
3311 }
3312
3313 /// Returns the [`Language`] at the given location.
3314 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3315 self.syntax_layer_at(position)
3316 .map(|info| info.language)
3317 .or(self.language.as_ref())
3318 }
3319
3320 /// Returns the settings for the language at the given location.
3321 pub fn settings_at<'a, D: ToOffset>(
3322 &'a self,
3323 position: D,
3324 cx: &'a App,
3325 ) -> Cow<'a, LanguageSettings> {
3326 language_settings(
3327 self.language_at(position).map(|l| l.name()),
3328 self.file.as_ref(),
3329 cx,
3330 )
3331 }
3332
3333 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3334 CharClassifier::new(self.language_scope_at(point))
3335 }
3336
3337 /// Returns the [`LanguageScope`] at the given location.
3338 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3339 let offset = position.to_offset(self);
3340 let mut scope = None;
3341 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3342
3343 // Use the layer that has the smallest node intersecting the given point.
3344 for layer in self
3345 .syntax
3346 .layers_for_range(offset..offset, &self.text, false)
3347 {
3348 let mut cursor = layer.node().walk();
3349
3350 let mut range = None;
3351 loop {
3352 let child_range = cursor.node().byte_range();
3353 if !child_range.contains(&offset) {
3354 break;
3355 }
3356
3357 range = Some(child_range);
3358 if cursor.goto_first_child_for_byte(offset).is_none() {
3359 break;
3360 }
3361 }
3362
3363 if let Some(range) = range
3364 && smallest_range_and_depth.as_ref().is_none_or(
3365 |(smallest_range, smallest_range_depth)| {
3366 if layer.depth > *smallest_range_depth {
3367 true
3368 } else if layer.depth == *smallest_range_depth {
3369 range.len() < smallest_range.len()
3370 } else {
3371 false
3372 }
3373 },
3374 )
3375 {
3376 smallest_range_and_depth = Some((range, layer.depth));
3377 scope = Some(LanguageScope {
3378 language: layer.language.clone(),
3379 override_id: layer.override_id(offset, &self.text),
3380 });
3381 }
3382 }
3383
3384 scope.or_else(|| {
3385 self.language.clone().map(|language| LanguageScope {
3386 language,
3387 override_id: None,
3388 })
3389 })
3390 }
3391
3392 /// Returns a tuple of the range and character kind of the word
3393 /// surrounding the given position.
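    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// // With buffer text "let foo_bar = 1;" and an offset inside "foo_bar":
    /// let (range, kind) = snapshot.surrounding_word(6, false);
    /// // `range` now covers the bytes of "foo_bar", and `kind` reports its character class.
    /// ```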
3394 pub fn surrounding_word<T: ToOffset>(
3395 &self,
3396 start: T,
3397 for_completion: bool,
3398 ) -> (Range<usize>, Option<CharKind>) {
3399 let mut start = start.to_offset(self);
3400 let mut end = start;
3401 let mut next_chars = self.chars_at(start).take(128).peekable();
3402 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3403
3404 let classifier = self
3405 .char_classifier_at(start)
3406 .for_completion(for_completion);
3407 let word_kind = cmp::max(
3408 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3409 next_chars.peek().copied().map(|c| classifier.kind(c)),
3410 );
3411
3412 for ch in prev_chars {
3413 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3414 start -= ch.len_utf8();
3415 } else {
3416 break;
3417 }
3418 }
3419
3420 for ch in next_chars {
3421 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3422 end += ch.len_utf8();
3423 } else {
3424 break;
3425 }
3426 }
3427
3428 (start..end, word_kind)
3429 }
3430
3431 /// Returns the closest syntax node enclosing the given range.
3432 pub fn syntax_ancestor<'a, T: ToOffset>(
3433 &'a self,
3434 range: Range<T>,
3435 ) -> Option<tree_sitter::Node<'a>> {
3436 let range = range.start.to_offset(self)..range.end.to_offset(self);
3437 let mut result: Option<tree_sitter::Node<'a>> = None;
3438 'outer: for layer in self
3439 .syntax
3440 .layers_for_range(range.clone(), &self.text, true)
3441 {
3442 let mut cursor = layer.node().walk();
3443
3444 // Descend to the first leaf that touches the start of the range.
3445 //
3446 // If the range is non-empty and the current node ends exactly at the start,
3447 // move to the next sibling to find a node that extends beyond the start.
3448 //
3449 // If the range is empty and the current node starts after the range position,
3450 // move to the previous sibling to find the node that contains the position.
3451 while cursor.goto_first_child_for_byte(range.start).is_some() {
3452 if !range.is_empty() && cursor.node().end_byte() == range.start {
3453 cursor.goto_next_sibling();
3454 }
3455 if range.is_empty() && cursor.node().start_byte() > range.start {
3456 cursor.goto_previous_sibling();
3457 }
3458 }
3459
3460 // Ascend to the smallest ancestor that strictly contains the range.
3461 loop {
3462 let node_range = cursor.node().byte_range();
3463 if node_range.start <= range.start
3464 && node_range.end >= range.end
3465 && node_range.len() > range.len()
3466 {
3467 break;
3468 }
3469 if !cursor.goto_parent() {
3470 continue 'outer;
3471 }
3472 }
3473
3474 let left_node = cursor.node();
3475 let mut layer_result = left_node;
3476
3477 // For an empty range, try to find another node immediately to the right of the range.
3478 if left_node.end_byte() == range.start {
3479 let mut right_node = None;
3480 while !cursor.goto_next_sibling() {
3481 if !cursor.goto_parent() {
3482 break;
3483 }
3484 }
3485
3486 while cursor.node().start_byte() == range.start {
3487 right_node = Some(cursor.node());
3488 if !cursor.goto_first_child() {
3489 break;
3490 }
3491 }
3492
3493 // If there is a candidate node on both sides of the (empty) range, then
3494 // decide between the two by favoring a named node over an anonymous token.
3495 // If both nodes are the same in that regard, favor the right one.
3496 if let Some(right_node) = right_node
3497 && (right_node.is_named() || !left_node.is_named())
3498 {
3499 layer_result = right_node;
3500 }
3501 }
3502
3503 if let Some(previous_result) = &result
3504 && previous_result.byte_range().len() < layer_result.byte_range().len()
3505 {
3506 continue;
3507 }
3508 result = Some(layer_result);
3509 }
3510
3511 result
3512 }
3513
3514 /// Returns the root syntax node within the given row
3515 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3516 let start_offset = position.to_offset(self);
3517
3518 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3519
3520 let layer = self
3521 .syntax
3522 .layers_for_range(start_offset..start_offset, &self.text, true)
3523 .next()?;
3524
3525 let mut cursor = layer.node().walk();
3526
3527 // Descend to the first leaf that touches the start of the range.
3528 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3529 if cursor.node().end_byte() == start_offset {
3530 cursor.goto_next_sibling();
3531 }
3532 }
3533
3534 // Ascend to the root node within the same row.
3535 while cursor.goto_parent() {
3536 if cursor.node().start_position().row != row {
3537 break;
3538 }
3539 }
3540
3541 Some(cursor.node())
3542 }
3543
3544 /// Returns the outline for the buffer.
3545 ///
3546 /// This method allows passing an optional [`SyntaxTheme`] to
3547 /// syntax-highlight the returned symbols.
3548 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3549 self.outline_items_containing(0..self.len(), true, theme)
3550 .map(Outline::new)
3551 }
3552
3553 /// Returns all the symbols that contain the given position.
3554 ///
3555 /// This method allows passing an optional [`SyntaxTheme`] to
3556 /// syntax-highlight the returned symbols.
3557 pub fn symbols_containing<T: ToOffset>(
3558 &self,
3559 position: T,
3560 theme: Option<&SyntaxTheme>,
3561 ) -> Option<Vec<OutlineItem<Anchor>>> {
3562 let position = position.to_offset(self);
3563 let mut items = self.outline_items_containing(
3564 position.saturating_sub(1)..self.len().min(position + 1),
3565 false,
3566 theme,
3567 )?;
3568 let mut prev_depth = None;
3569 items.retain(|item| {
3570 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3571 prev_depth = Some(item.depth);
3572 result
3573 });
3574 Some(items)
3575 }
3576
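/// Returns the point range of the first outline item that intersects the
/// given range, if any.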
3577 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3578 let range = range.to_offset(self);
3579 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3580 grammar.outline_config.as_ref().map(|c| &c.query)
3581 });
3582 let configs = matches
3583 .grammars()
3584 .iter()
3585 .map(|g| g.outline_config.as_ref().unwrap())
3586 .collect::<Vec<_>>();
3587
3588 while let Some(mat) = matches.peek() {
3589 let config = &configs[mat.grammar_index];
3590 let containing_item_node = maybe!({
3591 let item_node = mat.captures.iter().find_map(|cap| {
3592 if cap.index == config.item_capture_ix {
3593 Some(cap.node)
3594 } else {
3595 None
3596 }
3597 })?;
3598
3599 let item_byte_range = item_node.byte_range();
3600 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3601 None
3602 } else {
3603 Some(item_node)
3604 }
3605 });
3606
3607 if let Some(item_node) = containing_item_node {
3608 return Some(
3609 Point::from_ts_point(item_node.start_position())
3610 ..Point::from_ts_point(item_node.end_position()),
3611 );
3612 }
3613
3614 matches.advance();
3615 }
3616 None
3617 }
3618
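/// Returns the outline items whose ranges intersect the given range.
///
/// Items are ordered by their start position, and each item's depth reflects
/// how deeply it is nested within the other returned items. When
/// `include_extra_context` is true, captures marked as extra context in the
/// outline query are included in each item's text.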
3619 pub fn outline_items_containing<T: ToOffset>(
3620 &self,
3621 range: Range<T>,
3622 include_extra_context: bool,
3623 theme: Option<&SyntaxTheme>,
3624 ) -> Option<Vec<OutlineItem<Anchor>>> {
3625 let range = range.to_offset(self);
3626 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3627 grammar.outline_config.as_ref().map(|c| &c.query)
3628 });
3629 let configs = matches
3630 .grammars()
3631 .iter()
3632 .map(|g| g.outline_config.as_ref().unwrap())
3633 .collect::<Vec<_>>();
3634
3635 let mut items = Vec::new();
3636 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3637 while let Some(mat) = matches.peek() {
3638 let config = &configs[mat.grammar_index];
3639 if let Some(item) =
3640 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3641 {
3642 items.push(item);
3643 } else if let Some(capture) = mat
3644 .captures
3645 .iter()
3646 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3647 {
3648 let capture_range = capture.node.start_position()..capture.node.end_position();
3649 let mut capture_row_range =
3650 capture_range.start.row as u32..capture_range.end.row as u32;
3651 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3652 {
3653 capture_row_range.end -= 1;
3654 }
3655 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3656 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3657 last_row_range.end = capture_row_range.end;
3658 } else {
3659 annotation_row_ranges.push(capture_row_range);
3660 }
3661 } else {
3662 annotation_row_ranges.push(capture_row_range);
3663 }
3664 }
3665 matches.advance();
3666 }
3667
3668 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3669
3670 // Assign depths based on containment relationships and convert to anchors.
3671 let mut item_ends_stack = Vec::<Point>::new();
3672 let mut anchor_items = Vec::new();
3673 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3674 for item in items {
3675 while let Some(last_end) = item_ends_stack.last().copied() {
3676 if last_end < item.range.end {
3677 item_ends_stack.pop();
3678 } else {
3679 break;
3680 }
3681 }
3682
3683 let mut annotation_row_range = None;
3684 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3685 let row_preceding_item = item.range.start.row.saturating_sub(1);
3686 if next_annotation_row_range.end < row_preceding_item {
3687 annotation_row_ranges.next();
3688 } else {
3689 if next_annotation_row_range.end == row_preceding_item {
3690 annotation_row_range = Some(next_annotation_row_range.clone());
3691 annotation_row_ranges.next();
3692 }
3693 break;
3694 }
3695 }
3696
3697 anchor_items.push(OutlineItem {
3698 depth: item_ends_stack.len(),
3699 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3700 text: item.text,
3701 highlight_ranges: item.highlight_ranges,
3702 name_ranges: item.name_ranges,
3703 body_range: item.body_range.map(|body_range| {
3704 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3705 }),
3706 annotation_range: annotation_row_range.map(|annotation_range| {
3707 self.anchor_after(Point::new(annotation_range.start, 0))
3708 ..self.anchor_before(Point::new(
3709 annotation_range.end,
3710 self.line_len(annotation_range.end),
3711 ))
3712 }),
3713 });
3714 item_ends_stack.push(item.range.end);
3715 }
3716
3717 Some(anchor_items)
3718 }
3719
3720 fn next_outline_item(
3721 &self,
3722 config: &OutlineConfig,
3723 mat: &SyntaxMapMatch,
3724 range: &Range<usize>,
3725 include_extra_context: bool,
3726 theme: Option<&SyntaxTheme>,
3727 ) -> Option<OutlineItem<Point>> {
3728 let item_node = mat.captures.iter().find_map(|cap| {
3729 if cap.index == config.item_capture_ix {
3730 Some(cap.node)
3731 } else {
3732 None
3733 }
3734 })?;
3735
3736 let item_byte_range = item_node.byte_range();
3737 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3738 return None;
3739 }
3740 let item_point_range = Point::from_ts_point(item_node.start_position())
3741 ..Point::from_ts_point(item_node.end_position());
3742
3743 let mut open_point = None;
3744 let mut close_point = None;
3745 let mut buffer_ranges = Vec::new();
3746 for capture in mat.captures {
3747 let node_is_name;
3748 if capture.index == config.name_capture_ix {
3749 node_is_name = true;
3750 } else if Some(capture.index) == config.context_capture_ix
3751 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3752 {
3753 node_is_name = false;
3754 } else {
3755 if Some(capture.index) == config.open_capture_ix {
3756 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3757 } else if Some(capture.index) == config.close_capture_ix {
3758 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3759 }
3760
3761 continue;
3762 }
3763
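// If the capture spans multiple rows, truncate its range to the end of its first line.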
3764 let mut range = capture.node.start_byte()..capture.node.end_byte();
3765 let start = capture.node.start_position();
3766 if capture.node.end_position().row > start.row {
3767 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3768 }
3769
3770 if !range.is_empty() {
3771 buffer_ranges.push((range, node_is_name));
3772 }
3773 }
3774 if buffer_ranges.is_empty() {
3775 return None;
3776 }
3777 let mut text = String::new();
3778 let mut highlight_ranges = Vec::new();
3779 let mut name_ranges = Vec::new();
3780 let mut chunks = self.chunks(
3781 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3782 true,
3783 );
3784 let mut last_buffer_range_end = 0;
3785
3786 for (buffer_range, is_name) in buffer_ranges {
3787 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3788 if space_added {
3789 text.push(' ');
3790 }
3791 let before_append_len = text.len();
3792 let mut offset = buffer_range.start;
3793 chunks.seek(buffer_range.clone());
3794 for mut chunk in chunks.by_ref() {
3795 if chunk.text.len() > buffer_range.end - offset {
3796 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3797 offset = buffer_range.end;
3798 } else {
3799 offset += chunk.text.len();
3800 }
3801 let style = chunk
3802 .syntax_highlight_id
3803 .zip(theme)
3804 .and_then(|(highlight, theme)| highlight.style(theme));
3805 if let Some(style) = style {
3806 let start = text.len();
3807 let end = start + chunk.text.len();
3808 highlight_ranges.push((start..end, style));
3809 }
3810 text.push_str(chunk.text);
3811 if offset >= buffer_range.end {
3812 break;
3813 }
3814 }
3815 if is_name {
3816 let after_append_len = text.len();
3817 let start = if space_added && !name_ranges.is_empty() {
3818 before_append_len - 1
3819 } else {
3820 before_append_len
3821 };
3822 name_ranges.push(start..after_append_len);
3823 }
3824 last_buffer_range_end = buffer_range.end;
3825 }
3826
3827 Some(OutlineItem {
3828 depth: 0, // We'll calculate the depth later
3829 range: item_point_range,
3830 text,
3831 highlight_ranges,
3832 name_ranges,
3833 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3834 annotation_range: None,
3835 })
3836 }
3837
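/// Returns the ranges of function bodies ([`TextObject::InsideFunction`])
/// that intersect the given range; these are the ranges folded when folding
/// function bodies.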
3838 pub fn function_body_fold_ranges<T: ToOffset>(
3839 &self,
3840 within: Range<T>,
3841 ) -> impl Iterator<Item = Range<usize>> + '_ {
3842 self.text_object_ranges(within, TreeSitterOptions::default())
3843 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3844 }
3845
3846 /// For each grammar in the language, runs the provided
3847 /// [`tree_sitter::Query`] against the given range.
3848 pub fn matches(
3849 &self,
3850 range: Range<usize>,
3851 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3852 ) -> SyntaxMapMatches<'_> {
3853 self.syntax.matches(range, self, query)
3854 }
3855
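/// Returns all bracket pairs from the grammars' bracket queries whose overall
/// range (from the start of the open bracket to the end of the close bracket)
/// overlaps the given range.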
3856 pub fn all_bracket_ranges(
3857 &self,
3858 range: Range<usize>,
3859 ) -> impl Iterator<Item = BracketMatch> + '_ {
3860 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3861 grammar.brackets_config.as_ref().map(|c| &c.query)
3862 });
3863 let configs = matches
3864 .grammars()
3865 .iter()
3866 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3867 .collect::<Vec<_>>();
3868
3869 iter::from_fn(move || {
3870 while let Some(mat) = matches.peek() {
3871 let mut open = None;
3872 let mut close = None;
3873 let config = &configs[mat.grammar_index];
3874 let pattern = &config.patterns[mat.pattern_index];
3875 for capture in mat.captures {
3876 if capture.index == config.open_capture_ix {
3877 open = Some(capture.node.byte_range());
3878 } else if capture.index == config.close_capture_ix {
3879 close = Some(capture.node.byte_range());
3880 }
3881 }
3882
3883 matches.advance();
3884
3885 let Some((open_range, close_range)) = open.zip(close) else {
3886 continue;
3887 };
3888
3889 let bracket_range = open_range.start..=close_range.end;
3890 if !bracket_range.overlaps(&range) {
3891 continue;
3892 }
3893
3894 return Some(BracketMatch {
3895 open_range,
3896 close_range,
3897 newline_only: pattern.newline_only,
3898 });
3899 }
3900 None
3901 })
3902 }
3903
3904 /// Returns bracket range pairs overlapping or adjacent to `range`.
3904 /// Returns bracket range pairs overlapping or adjacent to `range`.
3905 pub fn bracket_ranges<T: ToOffset>(
3906 &self,
3907 range: Range<T>,
3908 ) -> impl Iterator<Item = BracketMatch> + '_ {
3909 // Widen the range by one position on each side so that bracket pairs adjacent to it are also found.
3910 let range = range.start.to_offset(self).saturating_sub(1)
3911 ..self.len().min(range.end.to_offset(self) + 1);
3912 self.all_bracket_ranges(range)
3913 .filter(|pair| !pair.newline_only)
3914 }
3915
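/// Returns the ranges captured by the grammars' debugger-variables queries
/// that overlap the given range, along with the [`DebuggerTextObject`] each
/// capture corresponds to.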
3916 pub fn debug_variables_query<T: ToOffset>(
3917 &self,
3918 range: Range<T>,
3919 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
3920 let range = range.start.to_offset(self).saturating_sub(1)
3921 ..self.len().min(range.end.to_offset(self) + 1);
3922
3923 let mut matches = self.syntax.matches_with_options(
3924 range.clone(),
3925 &self.text,
3926 TreeSitterOptions::default(),
3927 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
3928 );
3929
3930 let configs = matches
3931 .grammars()
3932 .iter()
3933 .map(|grammar| grammar.debug_variables_config.as_ref())
3934 .collect::<Vec<_>>();
3935
3936 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
3937
3938 iter::from_fn(move || {
3939 loop {
3940 while let Some(capture) = captures.pop() {
3941 if capture.0.overlaps(&range) {
3942 return Some(capture);
3943 }
3944 }
3945
3946 let mat = matches.peek()?;
3947
3948 let Some(config) = configs[mat.grammar_index].as_ref() else {
3949 matches.advance();
3950 continue;
3951 };
3952
3953 for capture in mat.captures {
3954 let Some(ix) = config
3955 .objects_by_capture_ix
3956 .binary_search_by_key(&capture.index, |e| e.0)
3957 .ok()
3958 else {
3959 continue;
3960 };
3961 let text_object = config.objects_by_capture_ix[ix].1;
3962 let byte_range = capture.node.byte_range();
3963
3964 let mut found = false;
3965 for (range, existing) in captures.iter_mut() {
3966 if existing == &text_object {
3967 range.start = range.start.min(byte_range.start);
3968 range.end = range.end.max(byte_range.end);
3969 found = true;
3970 break;
3971 }
3972 }
3973
3974 if !found {
3975 captures.push((byte_range, text_object));
3976 }
3977 }
3978
3979 matches.advance();
3980 }
3981 })
3982 }
3983
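/// Returns the text object ranges (such as [`TextObject::InsideFunction`])
/// that overlap the given range, along with the kind of text object each
/// range represents. Captures of the same kind within a single match are
/// merged into one range.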
3984 pub fn text_object_ranges<T: ToOffset>(
3985 &self,
3986 range: Range<T>,
3987 options: TreeSitterOptions,
3988 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3989 let range = range.start.to_offset(self).saturating_sub(1)
3990 ..self.len().min(range.end.to_offset(self) + 1);
3991
3992 let mut matches =
3993 self.syntax
3994 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3995 grammar.text_object_config.as_ref().map(|c| &c.query)
3996 });
3997
3998 let configs = matches
3999 .grammars()
4000 .iter()
4001 .map(|grammar| grammar.text_object_config.as_ref())
4002 .collect::<Vec<_>>();
4003
4004 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4005
4006 iter::from_fn(move || {
4007 loop {
4008 while let Some(capture) = captures.pop() {
4009 if capture.0.overlaps(&range) {
4010 return Some(capture);
4011 }
4012 }
4013
4014 let mat = matches.peek()?;
4015
4016 let Some(config) = configs[mat.grammar_index].as_ref() else {
4017 matches.advance();
4018 continue;
4019 };
4020
4021 for capture in mat.captures {
4022 let Some(ix) = config
4023 .text_objects_by_capture_ix
4024 .binary_search_by_key(&capture.index, |e| e.0)
4025 .ok()
4026 else {
4027 continue;
4028 };
4029 let text_object = config.text_objects_by_capture_ix[ix].1;
4030 let byte_range = capture.node.byte_range();
4031
4032 let mut found = false;
4033 for (range, existing) in captures.iter_mut() {
4034 if existing == &text_object {
4035 range.start = range.start.min(byte_range.start);
4036 range.end = range.end.max(byte_range.end);
4037 found = true;
4038 break;
4039 }
4040 }
4041
4042 if !found {
4043 captures.push((byte_range, text_object));
4044 }
4045 }
4046
4047 matches.advance();
4048 }
4049 })
4050 }
4051
4052 /// Returns the enclosing bracket ranges containing the given range.
4053 pub fn enclosing_bracket_ranges<T: ToOffset>(
4054 &self,
4055 range: Range<T>,
4056 ) -> impl Iterator<Item = BracketMatch> + '_ {
4057 let range = range.start.to_offset(self)..range.end.to_offset(self);
4058
4059 self.bracket_ranges(range.clone()).filter(move |pair| {
4060 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4061 })
4062 }
4063
4064 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4065 ///
4066 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4067 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4068 &self,
4069 range: Range<T>,
4070 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4071 ) -> Option<(Range<usize>, Range<usize>)> {
4072 let range = range.start.to_offset(self)..range.end.to_offset(self);
4073
4074 // Get the ranges of the innermost pair of brackets.
4075 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4076
4077 for pair in self.enclosing_bracket_ranges(range) {
4078 if let Some(range_filter) = range_filter
4079 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4080 {
4081 continue;
4082 }
4083
4084 let len = pair.close_range.end - pair.open_range.start;
4085
4086 if let Some((existing_open, existing_close)) = &result {
4087 let existing_len = existing_close.end - existing_open.start;
4088 if len > existing_len {
4089 continue;
4090 }
4091 }
4092
4093 result = Some((pair.open_range, pair.close_range));
4094 }
4095
4096 result
4097 }
4098
4099 /// Returns anchor ranges for any matches of the redaction query.
4100 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4101 /// will be run on the relevant section of the buffer.
4102 pub fn redacted_ranges<T: ToOffset>(
4103 &self,
4104 range: Range<T>,
4105 ) -> impl Iterator<Item = Range<usize>> + '_ {
4106 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4107 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4108 grammar
4109 .redactions_config
4110 .as_ref()
4111 .map(|config| &config.query)
4112 });
4113
4114 let configs = syntax_matches
4115 .grammars()
4116 .iter()
4117 .map(|grammar| grammar.redactions_config.as_ref())
4118 .collect::<Vec<_>>();
4119
4120 iter::from_fn(move || {
4121 let redacted_range = syntax_matches
4122 .peek()
4123 .and_then(|mat| {
4124 configs[mat.grammar_index].and_then(|config| {
4125 mat.captures
4126 .iter()
4127 .find(|capture| capture.index == config.redaction_capture_ix)
4128 })
4129 })
4130 .map(|mat| mat.node.byte_range());
4131 syntax_matches.advance();
4132 redacted_range
4133 })
4134 }
4135
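/// Returns the ranges of language injections (such as code embedded in
/// another language) that intersect the given range, along with the injected
/// [`Language`] for each one.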
4136 pub fn injections_intersecting_range<T: ToOffset>(
4137 &self,
4138 range: Range<T>,
4139 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4140 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4141
4142 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4143 grammar
4144 .injection_config
4145 .as_ref()
4146 .map(|config| &config.query)
4147 });
4148
4149 let configs = syntax_matches
4150 .grammars()
4151 .iter()
4152 .map(|grammar| grammar.injection_config.as_ref())
4153 .collect::<Vec<_>>();
4154
4155 iter::from_fn(move || {
4156 let ranges = syntax_matches.peek().and_then(|mat| {
4157 let config = &configs[mat.grammar_index]?;
4158 let content_capture_range = mat.captures.iter().find_map(|capture| {
4159 if capture.index == config.content_capture_ix {
4160 Some(capture.node.byte_range())
4161 } else {
4162 None
4163 }
4164 })?;
4165 let language = self.language_at(content_capture_range.start)?;
4166 Some((content_capture_range, language))
4167 });
4168 syntax_matches.advance();
4169 ranges
4170 })
4171 }
4172
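/// Returns the runnables (e.g. tests) captured by the grammars' runnables
/// queries within the given offset range, along with their run and full
/// ranges and any extra captured variables.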
4173 pub fn runnable_ranges(
4174 &self,
4175 offset_range: Range<usize>,
4176 ) -> impl Iterator<Item = RunnableRange> + '_ {
4177 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4178 grammar.runnable_config.as_ref().map(|config| &config.query)
4179 });
4180
4181 let test_configs = syntax_matches
4182 .grammars()
4183 .iter()
4184 .map(|grammar| grammar.runnable_config.as_ref())
4185 .collect::<Vec<_>>();
4186
4187 iter::from_fn(move || {
4188 loop {
4189 let mat = syntax_matches.peek()?;
4190
4191 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4192 let mut run_range = None;
4193 let full_range = mat.captures.iter().fold(
4194 Range {
4195 start: usize::MAX,
4196 end: 0,
4197 },
4198 |mut acc, next| {
4199 let byte_range = next.node.byte_range();
4200 if acc.start > byte_range.start {
4201 acc.start = byte_range.start;
4202 }
4203 if acc.end < byte_range.end {
4204 acc.end = byte_range.end;
4205 }
4206 acc
4207 },
4208 );
4209 if full_range.start > full_range.end {
4210 // We did not find a full spanning range of this match.
4211 return None;
4212 }
4213 let extra_captures: SmallVec<[_; 1]> =
4214 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4215 test_configs
4216 .extra_captures
4217 .get(capture.index as usize)
4218 .cloned()
4219 .and_then(|tag_name| match tag_name {
4220 RunnableCapture::Named(name) => {
4221 Some((capture.node.byte_range(), name))
4222 }
4223 RunnableCapture::Run => {
4224 let _ = run_range.insert(capture.node.byte_range());
4225 None
4226 }
4227 })
4228 }));
4229 let run_range = run_range?;
4230 let tags = test_configs
4231 .query
4232 .property_settings(mat.pattern_index)
4233 .iter()
4234 .filter_map(|property| {
4235 if *property.key == *"tag" {
4236 property
4237 .value
4238 .as_ref()
4239 .map(|value| RunnableTag(value.to_string().into()))
4240 } else {
4241 None
4242 }
4243 })
4244 .collect();
4245 let extra_captures = extra_captures
4246 .into_iter()
4247 .map(|(range, name)| {
4248 (
4249 name.to_string(),
4250 self.text_for_range(range).collect::<String>(),
4251 )
4252 })
4253 .collect();
4254 // All tags should have the same range.
4255 Some(RunnableRange {
4256 run_range,
4257 full_range,
4258 runnable: Runnable {
4259 tags,
4260 language: mat.language,
4261 buffer: self.remote_id(),
4262 },
4263 extra_captures,
4264 buffer_id: self.remote_id(),
4265 })
4266 });
4267
4268 syntax_matches.advance();
4269 if test_range.is_some() {
4270 // It's fine to short-circuit when `.peek()?` returns `None`. However, a match that lacked a run
4271 // marker shouldn't end this iterator, so in that case we loop around and try the next match.
4272 return test_range;
4273 }
4274 }
4275 })
4276 }
4277
4278 /// Returns selections for remote peers intersecting the given range.
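/// Each item is a tuple of `(replica_id, line_mode, cursor_shape, selections)`.
/// When `include_local` is false, selections belonging to the local replica
/// are omitted.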
4279 #[allow(clippy::type_complexity)]
4280 pub fn selections_in_range(
4281 &self,
4282 range: Range<Anchor>,
4283 include_local: bool,
4284 ) -> impl Iterator<
4285 Item = (
4286 ReplicaId,
4287 bool,
4288 CursorShape,
4289 impl Iterator<Item = &Selection<Anchor>> + '_,
4290 ),
4291 > + '_ {
4292 self.remote_selections
4293 .iter()
4294 .filter(move |(replica_id, set)| {
4295 (include_local || **replica_id != self.text.replica_id())
4296 && !set.selections.is_empty()
4297 })
4298 .map(move |(replica_id, set)| {
4299 let start_ix = match set.selections.binary_search_by(|probe| {
4300 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4301 }) {
4302 Ok(ix) | Err(ix) => ix,
4303 };
4304 let end_ix = match set.selections.binary_search_by(|probe| {
4305 probe.start.cmp(&range.end, self).then(Ordering::Less)
4306 }) {
4307 Ok(ix) | Err(ix) => ix,
4308 };
4309
4310 (
4311 *replica_id,
4312 set.line_mode,
4313 set.cursor_shape,
4314 set.selections[start_ix..end_ix].iter(),
4315 )
4316 })
4317 }
4318
4319 /// Returns whether the buffer contains any diagnostics.
4320 pub fn has_diagnostics(&self) -> bool {
4321 !self.diagnostics.is_empty()
4322 }
4323
4324 /// Returns all the diagnostics intersecting the given range.
4325 pub fn diagnostics_in_range<'a, T, O>(
4326 &'a self,
4327 search_range: Range<T>,
4328 reversed: bool,
4329 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4330 where
4331 T: 'a + Clone + ToOffset,
4332 O: 'a + FromAnchor,
4333 {
4334 let mut iterators: Vec<_> = self
4335 .diagnostics
4336 .iter()
4337 .map(|(_, collection)| {
4338 collection
4339 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4340 .peekable()
4341 })
4342 .collect();
4343
4344 std::iter::from_fn(move || {
4345 let (next_ix, _) = iterators
4346 .iter_mut()
4347 .enumerate()
4348 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4349 .min_by(|(_, a), (_, b)| {
4350 let cmp = a
4351 .range
4352 .start
4353 .cmp(&b.range.start, self)
4354 // when range is equal, sort by diagnostic severity
4355 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4356 // and stabilize order with group_id
4357 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4358 if reversed { cmp.reverse() } else { cmp }
4359 })?;
4360 iterators[next_ix]
4361 .next()
4362 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4363 diagnostic,
4364 range: FromAnchor::from_anchor(&range.start, self)
4365 ..FromAnchor::from_anchor(&range.end, self),
4366 })
4367 })
4368 }
4369
4370 /// Returns all the diagnostic groups associated with the given
4371 /// language server ID. If no language server ID is provided,
4372 /// all diagnostics groups are returned.
4373 pub fn diagnostic_groups(
4374 &self,
4375 language_server_id: Option<LanguageServerId>,
4376 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4377 let mut groups = Vec::new();
4378
4379 if let Some(language_server_id) = language_server_id {
4380 if let Ok(ix) = self
4381 .diagnostics
4382 .binary_search_by_key(&language_server_id, |e| e.0)
4383 {
4384 self.diagnostics[ix]
4385 .1
4386 .groups(language_server_id, &mut groups, self);
4387 }
4388 } else {
4389 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4390 diagnostics.groups(*language_server_id, &mut groups, self);
4391 }
4392 }
4393
4394 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4395 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4396 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4397 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4398 });
4399
4400 groups
4401 }
4402
4403 /// Returns an iterator over the diagnostics for the given group.
4404 pub fn diagnostic_group<O>(
4405 &self,
4406 group_id: usize,
4407 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4408 where
4409 O: FromAnchor + 'static,
4410 {
4411 self.diagnostics
4412 .iter()
4413 .flat_map(move |(_, set)| set.group(group_id, self))
4414 }
4415
4416 /// An integer version number that accounts for all updates besides
4417 /// the buffer's text itself (which is versioned via a version vector).
4418 pub fn non_text_state_update_count(&self) -> usize {
4419 self.non_text_state_update_count
4420 }
4421
4422 /// An integer version that changes when the buffer's syntax changes.
4423 pub fn syntax_update_count(&self) -> usize {
4424 self.syntax.update_count()
4425 }
4426
4427 /// Returns a snapshot of the underlying file, if any.
4428 pub fn file(&self) -> Option<&Arc<dyn File>> {
4429 self.file.as_ref()
4430 }
4431
4432 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4433 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4434 if let Some(file) = self.file() {
4435 if file.path().file_name().is_none() || include_root {
4436 Some(file.full_path(cx))
4437 } else {
4438 Some(file.path().to_path_buf())
4439 }
4440 } else {
4441 None
4442 }
4443 }
4444
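/// Returns the words within the query's offset range, keyed by their text and
/// mapped to their anchor ranges.
///
/// When [`WordsQuery::fuzzy_contents`] is set, only words containing all of
/// its characters (case-insensitively, in order) are returned, and when
/// [`WordsQuery::skip_digits`] is set, words starting with a digit are skipped.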
4445 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4446 let query_str = query.fuzzy_contents;
4447 if query_str.is_some_and(|query| query.is_empty()) {
4448 return BTreeMap::default();
4449 }
4450
4451 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4452 language,
4453 override_id: None,
4454 }));
4455
4456 let mut query_ix = 0;
4457 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4458 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4459
4460 let mut words = BTreeMap::default();
4461 let mut current_word_start_ix = None;
4462 let mut chunk_ix = query.range.start;
4463 for chunk in self.chunks(query.range, false) {
4464 for (i, c) in chunk.text.char_indices() {
4465 let ix = chunk_ix + i;
4466 if classifier.is_word(c) {
4467 if current_word_start_ix.is_none() {
4468 current_word_start_ix = Some(ix);
4469 }
4470
4471 if let Some(query_chars) = &query_chars
4472 && query_ix < query_len
4473 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4474 {
4475 query_ix += 1;
4476 }
4477 continue;
4478 } else if let Some(word_start) = current_word_start_ix.take()
4479 && query_ix == query_len
4480 {
4481 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4482 let mut word_text = self.text_for_range(word_start..ix).peekable();
4483 let first_char = word_text
4484 .peek()
4485 .and_then(|first_chunk| first_chunk.chars().next());
4486 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4487 if !query.skip_digits
4488 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4489 {
4490 words.insert(word_text.collect(), word_range);
4491 }
4492 }
4493 query_ix = 0;
4494 }
4495 chunk_ix += chunk.text.len();
4496 }
4497
4498 words
4499 }
4500}
4501
4502pub struct WordsQuery<'a> {
4503 /// When set, only words containing all of this string's characters (case-insensitively, in order) are returned.
4504 pub fuzzy_contents: Option<&'a str>,
4505 /// Skips words that start with a digit.
4506 pub skip_digits: bool,
4507 /// The buffer offset range in which to look for words.
4508 pub range: Range<usize>,
4509}
4510
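/// Returns the indentation at the start of the given row.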
4511fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4512 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4513}
4514
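/// Returns the indentation at the start of the given character stream,
/// counting leading spaces or tabs until the first other character.
///
/// ```ignore
/// // Sketch: four leading spaces yield a space indent of length 4.
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// // indent.kind == IndentKind::Space, indent.len == 4
/// ```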
4515fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4516 let mut result = IndentSize::spaces(0);
4517 for c in text {
4518 let kind = match c {
4519 ' ' => IndentKind::Space,
4520 '\t' => IndentKind::Tab,
4521 _ => break,
4522 };
4523 if result.len == 0 {
4524 result.kind = kind;
4525 }
4526 result.len += 1;
4527 }
4528 result
4529}
4530
4531impl Clone for BufferSnapshot {
4532 fn clone(&self) -> Self {
4533 Self {
4534 text: self.text.clone(),
4535 syntax: self.syntax.clone(),
4536 file: self.file.clone(),
4537 remote_selections: self.remote_selections.clone(),
4538 diagnostics: self.diagnostics.clone(),
4539 language: self.language.clone(),
4540 non_text_state_update_count: self.non_text_state_update_count,
4541 }
4542 }
4543}
4544
4545impl Deref for BufferSnapshot {
4546 type Target = text::BufferSnapshot;
4547
4548 fn deref(&self) -> &Self::Target {
4549 &self.text
4550 }
4551}
4552
4553unsafe impl Send for BufferChunks<'_> {}
4554
4555impl<'a> BufferChunks<'a> {
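/// Creates a chunk iterator over the given byte range of the rope, optionally
/// carrying syntax-highlighting captures and diagnostic endpoints taken from
/// the provided buffer snapshot.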
4556 pub(crate) fn new(
4557 text: &'a Rope,
4558 range: Range<usize>,
4559 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4560 diagnostics: bool,
4561 buffer_snapshot: Option<&'a BufferSnapshot>,
4562 ) -> Self {
4563 let mut highlights = None;
4564 if let Some((captures, highlight_maps)) = syntax {
4565 highlights = Some(BufferChunkHighlights {
4566 captures,
4567 next_capture: None,
4568 stack: Default::default(),
4569 highlight_maps,
4570 })
4571 }
4572
4573 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4574 let chunks = text.chunks_in_range(range.clone());
4575
4576 let mut this = BufferChunks {
4577 range,
4578 buffer_snapshot,
4579 chunks,
4580 diagnostic_endpoints,
4581 error_depth: 0,
4582 warning_depth: 0,
4583 information_depth: 0,
4584 hint_depth: 0,
4585 unnecessary_depth: 0,
4586 underline: true,
4587 highlights,
4588 };
4589 this.initialize_diagnostic_endpoints();
4590 this
4591 }
4592
4593 /// Seeks to the given byte range in the buffer.
4594 pub fn seek(&mut self, range: Range<usize>) {
4595 let old_range = std::mem::replace(&mut self.range, range.clone());
4596 self.chunks.set_range(self.range.clone());
4597 if let Some(highlights) = self.highlights.as_mut() {
4598 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4599 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4600 highlights
4601 .stack
4602 .retain(|(end_offset, _)| *end_offset > range.start);
4603 if let Some(capture) = &highlights.next_capture
4604 && range.start >= capture.node.start_byte()
4605 {
4606 let next_capture_end = capture.node.end_byte();
4607 if range.start < next_capture_end {
4608 highlights.stack.push((
4609 next_capture_end,
4610 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4611 ));
4612 }
4613 highlights.next_capture.take();
4614 }
4615 } else if let Some(snapshot) = self.buffer_snapshot {
4616 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4617 *highlights = BufferChunkHighlights {
4618 captures,
4619 next_capture: None,
4620 stack: Default::default(),
4621 highlight_maps,
4622 };
4623 } else {
4624 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4625 // Seeking such BufferChunks is not supported.
4626 debug_assert!(
4627 false,
4628 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4629 );
4630 }
4631
4632 highlights.captures.set_byte_range(self.range.clone());
4633 self.initialize_diagnostic_endpoints();
4634 }
4635 }
4636
4637 fn initialize_diagnostic_endpoints(&mut self) {
4638 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4639 && let Some(buffer) = self.buffer_snapshot
4640 {
4641 let mut diagnostic_endpoints = Vec::new();
4642 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4643 diagnostic_endpoints.push(DiagnosticEndpoint {
4644 offset: entry.range.start,
4645 is_start: true,
4646 severity: entry.diagnostic.severity,
4647 is_unnecessary: entry.diagnostic.is_unnecessary,
4648 underline: entry.diagnostic.underline,
4649 });
4650 diagnostic_endpoints.push(DiagnosticEndpoint {
4651 offset: entry.range.end,
4652 is_start: false,
4653 severity: entry.diagnostic.severity,
4654 is_unnecessary: entry.diagnostic.is_unnecessary,
4655 underline: entry.diagnostic.underline,
4656 });
4657 }
4658 diagnostic_endpoints
4659 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4660 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4661 self.hint_depth = 0;
4662 self.error_depth = 0;
4663 self.warning_depth = 0;
4664 self.information_depth = 0;
4665 }
4666 }
4667
4668 /// The current byte offset in the buffer.
4669 pub fn offset(&self) -> usize {
4670 self.range.start
4671 }
4672
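/// The remaining byte range that this iterator will produce chunks for,
/// from the current offset to the end of the originally requested range.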
4673 pub fn range(&self) -> Range<usize> {
4674 self.range.clone()
4675 }
4676
4677 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4678 let depth = match endpoint.severity {
4679 DiagnosticSeverity::ERROR => &mut self.error_depth,
4680 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4681 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4682 DiagnosticSeverity::HINT => &mut self.hint_depth,
4683 _ => return,
4684 };
4685 if endpoint.is_start {
4686 *depth += 1;
4687 } else {
4688 *depth -= 1;
4689 }
4690
4691 if endpoint.is_unnecessary {
4692 if endpoint.is_start {
4693 self.unnecessary_depth += 1;
4694 } else {
4695 self.unnecessary_depth -= 1;
4696 }
4697 }
4698 }
4699
4700 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4701 if self.error_depth > 0 {
4702 Some(DiagnosticSeverity::ERROR)
4703 } else if self.warning_depth > 0 {
4704 Some(DiagnosticSeverity::WARNING)
4705 } else if self.information_depth > 0 {
4706 Some(DiagnosticSeverity::INFORMATION)
4707 } else if self.hint_depth > 0 {
4708 Some(DiagnosticSeverity::HINT)
4709 } else {
4710 None
4711 }
4712 }
4713
4714 fn current_code_is_unnecessary(&self) -> bool {
4715 self.unnecessary_depth > 0
4716 }
4717}
4718
4719impl<'a> Iterator for BufferChunks<'a> {
4720 type Item = Chunk<'a>;
4721
4722 fn next(&mut self) -> Option<Self::Item> {
4723 let mut next_capture_start = usize::MAX;
4724 let mut next_diagnostic_endpoint = usize::MAX;
4725
4726 if let Some(highlights) = self.highlights.as_mut() {
4727 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4728 if *parent_capture_end <= self.range.start {
4729 highlights.stack.pop();
4730 } else {
4731 break;
4732 }
4733 }
4734
4735 if highlights.next_capture.is_none() {
4736 highlights.next_capture = highlights.captures.next();
4737 }
4738
4739 while let Some(capture) = highlights.next_capture.as_ref() {
4740 if self.range.start < capture.node.start_byte() {
4741 next_capture_start = capture.node.start_byte();
4742 break;
4743 } else {
4744 let highlight_id =
4745 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4746 highlights
4747 .stack
4748 .push((capture.node.end_byte(), highlight_id));
4749 highlights.next_capture = highlights.captures.next();
4750 }
4751 }
4752 }
4753
4754 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4755 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4756 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4757 if endpoint.offset <= self.range.start {
4758 self.update_diagnostic_depths(endpoint);
4759 diagnostic_endpoints.next();
4760 self.underline = endpoint.underline;
4761 } else {
4762 next_diagnostic_endpoint = endpoint.offset;
4763 break;
4764 }
4765 }
4766 }
4767 self.diagnostic_endpoints = diagnostic_endpoints;
4768
4769 if let Some(chunk) = self.chunks.peek() {
4770 let chunk_start = self.range.start;
4771 let mut chunk_end = (self.chunks.offset() + chunk.len())
4772 .min(next_capture_start)
4773 .min(next_diagnostic_endpoint);
4774 let mut highlight_id = None;
4775 if let Some(highlights) = self.highlights.as_ref()
4776 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
4777 {
4778 chunk_end = chunk_end.min(*parent_capture_end);
4779 highlight_id = Some(*parent_highlight_id);
4780 }
4781
4782 let slice =
4783 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4784 self.range.start = chunk_end;
4785 if self.range.start == self.chunks.offset() + chunk.len() {
4786 self.chunks.next().unwrap();
4787 }
4788
4789 Some(Chunk {
4790 text: slice,
4791 syntax_highlight_id: highlight_id,
4792 underline: self.underline,
4793 diagnostic_severity: self.current_diagnostic_severity(),
4794 is_unnecessary: self.current_code_is_unnecessary(),
4795 ..Chunk::default()
4796 })
4797 } else {
4798 None
4799 }
4800 }
4801}
4802
4803impl operation_queue::Operation for Operation {
4804 fn lamport_timestamp(&self) -> clock::Lamport {
4805 match self {
4806 Operation::Buffer(_) => {
4807 unreachable!("buffer operations should never be deferred at this layer")
4808 }
4809 Operation::UpdateDiagnostics {
4810 lamport_timestamp, ..
4811 }
4812 | Operation::UpdateSelections {
4813 lamport_timestamp, ..
4814 }
4815 | Operation::UpdateCompletionTriggers {
4816 lamport_timestamp, ..
4817 } => *lamport_timestamp,
4818 }
4819 }
4820}
4821
4822impl Default for Diagnostic {
4823 fn default() -> Self {
4824 Self {
4825 source: Default::default(),
4826 source_kind: DiagnosticSourceKind::Other,
4827 code: None,
4828 code_description: None,
4829 severity: DiagnosticSeverity::ERROR,
4830 message: Default::default(),
4831 markdown: None,
4832 group_id: 0,
4833 is_primary: false,
4834 is_disk_based: false,
4835 is_unnecessary: false,
4836 underline: true,
4837 data: None,
4838 }
4839 }
4840}
4841
4842impl IndentSize {
4843 /// Returns an [`IndentSize`] representing the given spaces.
4844 pub fn spaces(len: u32) -> Self {
4845 Self {
4846 len,
4847 kind: IndentKind::Space,
4848 }
4849 }
4850
4851 /// Returns an [`IndentSize`] representing a tab.
4852 pub fn tab() -> Self {
4853 Self {
4854 len: 1,
4855 kind: IndentKind::Tab,
4856 }
4857 }
4858
4859 /// An iterator over the characters represented by this [`IndentSize`].
4860 pub fn chars(&self) -> impl Iterator<Item = char> {
4861 iter::repeat(self.char()).take(self.len as usize)
4862 }
4863
4864 /// The character representation of this [`IndentSize`].
4865 pub fn char(&self) -> char {
4866 match self.kind {
4867 IndentKind::Space => ' ',
4868 IndentKind::Tab => '\t',
4869 }
4870 }
4871
4872 /// Consumes the current [`IndentSize`] and returns a new one that has
4873 /// been shrunk or enlarged by the given size along the given direction.
4874 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4875 match direction {
4876 Ordering::Less => {
4877 if self.kind == size.kind && self.len >= size.len {
4878 self.len -= size.len;
4879 }
4880 }
4881 Ordering::Equal => {}
4882 Ordering::Greater => {
4883 if self.len == 0 {
4884 self = size;
4885 } else if self.kind == size.kind {
4886 self.len += size.len;
4887 }
4888 }
4889 }
4890 self
4891 }
4892
4893 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4894 match self.kind {
4895 IndentKind::Space => self.len as usize,
4896 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4897 }
4898 }
4899}
4900
4901#[cfg(any(test, feature = "test-support"))]
4902pub struct TestFile {
4903 pub path: Arc<Path>,
4904 pub root_name: String,
4905 pub local_root: Option<PathBuf>,
4906}
4907
4908#[cfg(any(test, feature = "test-support"))]
4909impl File for TestFile {
4910 fn path(&self) -> &Arc<Path> {
4911 &self.path
4912 }
4913
4914 fn full_path(&self, _: &gpui::App) -> PathBuf {
4915 PathBuf::from(&self.root_name).join(self.path.as_ref())
4916 }
4917
4918 fn as_local(&self) -> Option<&dyn LocalFile> {
4919 if self.local_root.is_some() {
4920 Some(self)
4921 } else {
4922 None
4923 }
4924 }
4925
4926 fn disk_state(&self) -> DiskState {
4927 unimplemented!()
4928 }
4929
4930 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4931 self.path().file_name().unwrap_or(self.root_name.as_ref())
4932 }
4933
4934 fn worktree_id(&self, _: &App) -> WorktreeId {
4935 WorktreeId::from_usize(0)
4936 }
4937
4938 fn to_proto(&self, _: &App) -> rpc::proto::File {
4939 unimplemented!()
4940 }
4941
4942 fn is_private(&self) -> bool {
4943 false
4944 }
4945}
4946
4947#[cfg(any(test, feature = "test-support"))]
4948impl LocalFile for TestFile {
4949 fn abs_path(&self, _cx: &App) -> PathBuf {
4950 PathBuf::from(self.local_root.as_ref().unwrap())
4951 .join(&self.root_name)
4952 .join(self.path.as_ref())
4953 }
4954
4955 fn load(&self, _cx: &App) -> Task<Result<String>> {
4956 unimplemented!()
4957 }
4958
4959 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4960 unimplemented!()
4961 }
4962}
4963
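/// Groups an ascending sequence of values into ranges of consecutive values,
/// capping each range at `max_len` values.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2)` yields
/// `1..3`, `3..4`, `5..7`, and `9..10`.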
4964pub(crate) fn contiguous_ranges(
4965 values: impl Iterator<Item = u32>,
4966 max_len: usize,
4967) -> impl Iterator<Item = Range<u32>> {
4968 let mut values = values;
4969 let mut current_range: Option<Range<u32>> = None;
4970 std::iter::from_fn(move || {
4971 loop {
4972 if let Some(value) = values.next() {
4973 if let Some(range) = &mut current_range
4974 && value == range.end
4975 && range.len() < max_len
4976 {
4977 range.end += 1;
4978 continue;
4979 }
4980
4981 let prev_range = current_range.clone();
4982 current_range = Some(value..(value + 1));
4983 if prev_range.is_some() {
4984 return prev_range;
4985 }
4986 } else {
4987 return current_range.take();
4988 }
4989 }
4990 })
4991}
4992
4993#[derive(Default, Debug)]
4994pub struct CharClassifier {
4995 scope: Option<LanguageScope>,
4996 for_completion: bool,
4997 ignore_punctuation: bool,
4998}
4999
5000impl CharClassifier {
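/// Creates a classifier for the given language scope. With `None`, only the
/// default rules apply: alphanumerics and `_` are words, whitespace is
/// whitespace, and everything else is punctuation.
///
/// ```ignore
/// // Sketch of the default classification with no language scope.
/// let classifier = CharClassifier::new(None);
/// classifier.kind('a'); // CharKind::Word
/// classifier.kind(' '); // CharKind::Whitespace
/// classifier.kind('.'); // CharKind::Punctuation
/// ```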
5001 pub fn new(scope: Option<LanguageScope>) -> Self {
5002 Self {
5003 scope,
5004 for_completion: false,
5005 ignore_punctuation: false,
5006 }
5007 }
5008
5009 pub fn for_completion(self, for_completion: bool) -> Self {
5010 Self {
5011 for_completion,
5012 ..self
5013 }
5014 }
5015
5016 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5017 Self {
5018 ignore_punctuation,
5019 ..self
5020 }
5021 }
5022
5023 pub fn is_whitespace(&self, c: char) -> bool {
5024 self.kind(c) == CharKind::Whitespace
5025 }
5026
5027 pub fn is_word(&self, c: char) -> bool {
5028 self.kind(c) == CharKind::Word
5029 }
5030
5031 pub fn is_punctuation(&self, c: char) -> bool {
5032 self.kind(c) == CharKind::Punctuation
5033 }
5034
5035 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5036 if c.is_alphanumeric() || c == '_' {
5037 return CharKind::Word;
5038 }
5039
5040 if let Some(scope) = &self.scope {
5041 let characters = if self.for_completion {
5042 scope.completion_query_characters()
5043 } else {
5044 scope.word_characters()
5045 };
5046 if let Some(characters) = characters
5047 && characters.contains(&c)
5048 {
5049 return CharKind::Word;
5050 }
5051 }
5052
5053 if c.is_whitespace() {
5054 return CharKind::Whitespace;
5055 }
5056
5057 if ignore_punctuation {
5058 CharKind::Word
5059 } else {
5060 CharKind::Punctuation
5061 }
5062 }
5063
5064 pub fn kind(&self, c: char) -> CharKind {
5065 self.kind_with(c, self.ignore_punctuation)
5066 }
5067}
5068
5069/// Find all of the ranges of whitespace that occur at the ends of lines
5070/// in the given rope.
5071///
5072/// This could also be done with a regex search, but this implementation
5073/// avoids copying text.
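///
/// ```ignore
/// // Sketch, assuming `Rope: From<&str>`: one range per line that ends in spaces or tabs.
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // `ranges` covers the spaces after `{` and the tab after `1;`.
/// ```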
5074pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5075 let mut ranges = Vec::new();
5076
5077 let mut offset = 0;
5078 let mut prev_chunk_trailing_whitespace_range = 0..0;
5079 for chunk in rope.chunks() {
5080 let mut prev_line_trailing_whitespace_range = 0..0;
5081 for (i, line) in chunk.split('\n').enumerate() {
5082 let line_end_offset = offset + line.len();
5083 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5084 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5085
5086 if i == 0 && trimmed_line_len == 0 {
5087 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5088 }
5089 if !prev_line_trailing_whitespace_range.is_empty() {
5090 ranges.push(prev_line_trailing_whitespace_range);
5091 }
5092
5093 offset = line_end_offset + 1;
5094 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5095 }
5096
5097 offset -= 1;
5098 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5099 }
5100
5101 if !prev_chunk_trailing_whitespace_range.is_empty() {
5102 ranges.push(prev_chunk_trailing_whitespace_range);
5103 }
5104
5105 ranges
5106}