1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25pub use clock::ReplicaId;
26use clock::{Global, Lamport};
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
134}
135
136#[derive(Debug, Clone)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self) {
146 self.brackets_by_chunks = vec![None; self.chunks.len()];
147 }
148
149 fn new(snapshot: text::BufferSnapshot) -> Self {
150 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
151 Self {
152 brackets_by_chunks: vec![None; chunks.len()],
153 chunks,
154 }
155 }
156}
157
158#[derive(Copy, Clone, Debug, PartialEq, Eq)]
159pub enum ParseStatus {
160 Idle,
161 Parsing,
162}
163
164struct BufferBranchState {
165 base_buffer: Entity<Buffer>,
166 merged_operations: Vec<Lamport>,
167}
168
169/// An immutable, cheaply cloneable representation of a fixed
170/// state of a buffer.
171pub struct BufferSnapshot {
172 pub text: text::BufferSnapshot,
173 pub syntax: SyntaxSnapshot,
174 file: Option<Arc<dyn File>>,
175 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
176 remote_selections: TreeMap<ReplicaId, SelectionSet>,
177 language: Option<Arc<Language>>,
178 non_text_state_update_count: usize,
179 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
180}
181
182/// The kind and amount of indentation in a particular line. For now,
183/// assumes that indentation is all the same character.
184#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
185pub struct IndentSize {
186 /// The number of bytes that comprise the indentation.
187 pub len: u32,
188 /// The kind of whitespace used for indentation.
189 pub kind: IndentKind,
190}
191
192/// A whitespace character that's used for indentation.
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
194pub enum IndentKind {
195 /// An ASCII space character.
196 #[default]
197 Space,
198 /// An ASCII tab character.
199 Tab,
200}
201
202/// The shape of a selection cursor.
203#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
204pub enum CursorShape {
205 /// A vertical bar
206 #[default]
207 Bar,
208 /// A block that surrounds the following character
209 Block,
210 /// An underline that runs along the following character
211 Underline,
212 /// A box drawn around the following character
213 Hollow,
214}
215
216impl From<settings::CursorShape> for CursorShape {
217 fn from(shape: settings::CursorShape) -> Self {
218 match shape {
219 settings::CursorShape::Bar => CursorShape::Bar,
220 settings::CursorShape::Block => CursorShape::Block,
221 settings::CursorShape::Underline => CursorShape::Underline,
222 settings::CursorShape::Hollow => CursorShape::Hollow,
223 }
224 }
225}
226
227#[derive(Clone, Debug)]
228struct SelectionSet {
229 line_mode: bool,
230 cursor_shape: CursorShape,
231 selections: Arc<[Selection<Anchor>]>,
232 lamport_timestamp: clock::Lamport,
233}
234
235/// A diagnostic associated with a certain range of a buffer.
236#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
237pub struct Diagnostic {
238 /// The name of the service that produced this diagnostic.
239 pub source: Option<String>,
240 /// A machine-readable code that identifies this diagnostic.
241 pub code: Option<NumberOrString>,
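    /// A URI pointing to documentation for this diagnostic's code, if the source provided one.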
242 pub code_description: Option<lsp::Uri>,
243 /// Whether this diagnostic is a hint, warning, or error.
244 pub severity: DiagnosticSeverity,
245 /// The human-readable message associated with this diagnostic.
246 pub message: String,
    /// The human-readable message in Markdown format, if available.
248 pub markdown: Option<String>,
249 /// An id that identifies the group to which this diagnostic belongs.
250 ///
251 /// When a language server produces a diagnostic with
252 /// one or more associated diagnostics, those diagnostics are all
253 /// assigned a single group ID.
254 pub group_id: usize,
255 /// Whether this diagnostic is the primary diagnostic for its group.
256 ///
257 /// In a given group, the primary diagnostic is the top-level diagnostic
258 /// returned by the language server. The non-primary diagnostics are the
259 /// associated diagnostics.
260 pub is_primary: bool,
261 /// Whether this diagnostic is considered to originate from an analysis of
262 /// files on disk, as opposed to any unsaved buffer contents. This is a
263 /// property of a given diagnostic source, and is configured for a given
264 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
265 /// for the language server.
266 pub is_disk_based: bool,
267 /// Whether this diagnostic marks unnecessary code.
268 pub is_unnecessary: bool,
    /// Allows quickly separating diagnostic groups by their source kind.
270 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
272 pub data: Option<Value>,
273 /// Whether to underline the corresponding text range in the editor.
274 pub underline: bool,
275}
276
277#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
278pub enum DiagnosticSourceKind {
279 Pulled,
280 Pushed,
281 Other,
282}
283
284/// An operation used to synchronize this buffer with its other replicas.
285#[derive(Clone, Debug, PartialEq)]
286pub enum Operation {
287 /// A text operation.
288 Buffer(text::Operation),
289
290 /// An update to the buffer's diagnostics.
291 UpdateDiagnostics {
292 /// The id of the language server that produced the new diagnostics.
293 server_id: LanguageServerId,
294 /// The diagnostics.
295 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
296 /// The buffer's lamport timestamp.
297 lamport_timestamp: clock::Lamport,
298 },
299
300 /// An update to the most recent selections in this buffer.
301 UpdateSelections {
302 /// The selections.
303 selections: Arc<[Selection<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 /// Whether the selections are in 'line mode'.
307 line_mode: bool,
308 /// The [`CursorShape`] associated with these selections.
309 cursor_shape: CursorShape,
310 },
311
312 /// An update to the characters that should trigger autocompletion
313 /// for this buffer.
314 UpdateCompletionTriggers {
315 /// The characters that trigger autocompletion.
316 triggers: Vec<String>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// The language server ID.
320 server_id: LanguageServerId,
321 },
322
323 /// An update to the line ending type of this buffer.
324 UpdateLineEnding {
325 /// The line ending type.
326 line_ending: LineEnding,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 },
330}
331
332/// An event that occurs in a buffer.
333#[derive(Clone, Debug, PartialEq)]
334pub enum BufferEvent {
335 /// The buffer was changed in a way that must be
336 /// propagated to its other replicas.
337 Operation {
338 operation: Operation,
339 is_local: bool,
340 },
341 /// The buffer was edited.
342 Edited,
343 /// The buffer's `dirty` bit changed.
344 DirtyChanged,
345 /// The buffer was saved.
346 Saved,
347 /// The buffer's file was changed on disk.
348 FileHandleChanged,
349 /// The buffer was reloaded.
350 Reloaded,
    /// The buffer needs to be reloaded.
352 ReloadNeeded,
353 /// The buffer's language was changed.
354 LanguageChanged,
355 /// The buffer's syntax trees were updated.
356 Reparsed,
357 /// The buffer's diagnostics were updated.
358 DiagnosticsUpdated,
359 /// The buffer gained or lost editing capabilities.
360 CapabilityChanged,
361}
362
363/// The file associated with a buffer.
364pub trait File: Send + Sync + Any {
365 /// Returns the [`LocalFile`] associated with this file, if the
366 /// file is local.
367 fn as_local(&self) -> Option<&dyn LocalFile>;
368
369 /// Returns whether this file is local.
370 fn is_local(&self) -> bool {
371 self.as_local().is_some()
372 }
373
374 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
375 /// only available in some states, such as modification time.
376 fn disk_state(&self) -> DiskState;
377
378 /// Returns the path of this file relative to the worktree's root directory.
379 fn path(&self) -> &Arc<RelPath>;
380
381 /// Returns the path of this file relative to the worktree's parent directory (this means it
382 /// includes the name of the worktree's root folder).
383 fn full_path(&self, cx: &App) -> PathBuf;
384
385 /// Returns the path style of this file.
386 fn path_style(&self, cx: &App) -> PathStyle;
387
388 /// Returns the last component of this handle's absolute path. If this handle refers to the root
389 /// of its worktree, then this method will return the name of the worktree itself.
390 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
391
392 /// Returns the id of the worktree to which this file belongs.
393 ///
394 /// This is needed for looking up project-specific settings.
395 fn worktree_id(&self, cx: &App) -> WorktreeId;
396
397 /// Converts this file into a protobuf message.
398 fn to_proto(&self, cx: &App) -> rpc::proto::File;
399
    /// Returns whether Zed considers this to be a private file.
401 fn is_private(&self) -> bool;
402}
403
404/// The file's storage status - whether it's stored (`Present`), and if so when it was last
405/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
406/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
407/// indicator for new files.
408#[derive(Copy, Clone, Debug, PartialEq)]
409pub enum DiskState {
410 /// File created in Zed that has not been saved.
411 New,
412 /// File present on the filesystem.
413 Present { mtime: MTime },
414 /// Deleted file that was previously present.
415 Deleted,
416}
417
418impl DiskState {
419 /// Returns the file's last known modification time on disk.
420 pub fn mtime(self) -> Option<MTime> {
421 match self {
422 DiskState::New => None,
423 DiskState::Present { mtime } => Some(mtime),
424 DiskState::Deleted => None,
425 }
426 }
427
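    /// Returns whether the file currently exists on the filesystem.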
428 pub fn exists(&self) -> bool {
429 match self {
430 DiskState::New => false,
431 DiskState::Present { .. } => true,
432 DiskState::Deleted => false,
433 }
434 }
435}
436
437/// The file associated with a buffer, in the case where the file is on the local disk.
438pub trait LocalFile: File {
439 /// Returns the absolute path of this file
440 fn abs_path(&self, cx: &App) -> PathBuf;
441
442 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
443 fn load(&self, cx: &App) -> Task<Result<String>>;
444
445 /// Loads the file's contents from disk.
446 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
447}
448
449/// The auto-indent behavior associated with an editing operation.
450/// For some editing operations, each affected line of text has its
451/// indentation recomputed. For other operations, the entire block
452/// of edited text is adjusted uniformly.
453#[derive(Clone, Debug)]
454pub enum AutoindentMode {
455 /// Indent each line of inserted text.
456 EachLine,
457 /// Apply the same indentation adjustment to all of the lines
458 /// in a given insertion.
459 Block {
460 /// The original indentation column of the first line of each
461 /// insertion, if it has been copied.
462 ///
463 /// Knowing this makes it possible to preserve the relative indentation
464 /// of every line in the insertion from when it was copied.
465 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by the difference `b - a`.
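        ///
        /// For example, if a copied block originally started at column 4 (`a = 4`)
        /// and its first line is auto-indented to column 8 (`b = 8`), then every
        /// other line of the insertion is shifted right by `8 - 4 = 4` columns.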
469 original_indent_columns: Vec<Option<u32>>,
470 },
471}
472
473#[derive(Clone)]
474struct AutoindentRequest {
475 before_edit: BufferSnapshot,
476 entries: Vec<AutoindentRequestEntry>,
477 is_block_mode: bool,
478 ignore_empty_lines: bool,
479}
480
481#[derive(Debug, Clone)]
482struct AutoindentRequestEntry {
483 /// A range of the buffer whose indentation should be adjusted.
484 range: Range<Anchor>,
485 /// Whether or not these lines should be considered brand new, for the
486 /// purpose of auto-indent. When text is not new, its indentation will
487 /// only be adjusted if the suggested indentation level has *changed*
488 /// since the edit was made.
489 first_line_is_new: bool,
490 indent_size: IndentSize,
491 original_indent_column: Option<u32>,
492}
493
494#[derive(Debug)]
495struct IndentSuggestion {
496 basis_row: u32,
497 delta: Ordering,
498 within_error: bool,
499}
500
501struct BufferChunkHighlights<'a> {
502 captures: SyntaxMapCaptures<'a>,
503 next_capture: Option<SyntaxMapCapture<'a>>,
504 stack: Vec<(usize, HighlightId)>,
505 highlight_maps: Vec<HighlightMap>,
506}
507
508/// An iterator that yields chunks of a buffer's text, along with their
509/// syntax highlights and diagnostic status.
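///
/// A minimal illustrative sketch (not compiled as a doctest); assumes a
/// [`BufferSnapshot`] named `snapshot` is in scope:
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries its text along with optional syntax highlight
///     // and diagnostic information.
///     println!("{:?} {:?}", chunk.text, chunk.syntax_highlight_id);
/// }
/// ```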
510pub struct BufferChunks<'a> {
511 buffer_snapshot: Option<&'a BufferSnapshot>,
512 range: Range<usize>,
513 chunks: text::Chunks<'a>,
514 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
515 error_depth: usize,
516 warning_depth: usize,
517 information_depth: usize,
518 hint_depth: usize,
519 unnecessary_depth: usize,
520 underline: bool,
521 highlights: Option<BufferChunkHighlights<'a>>,
522}
523
524/// A chunk of a buffer's text, along with its syntax highlight and
525/// diagnostic status.
526#[derive(Clone, Debug, Default)]
527pub struct Chunk<'a> {
528 /// The text of the chunk.
529 pub text: &'a str,
530 /// The syntax highlighting style of the chunk.
531 pub syntax_highlight_id: Option<HighlightId>,
532 /// The highlight style that has been applied to this chunk in
533 /// the editor.
534 pub highlight_style: Option<HighlightStyle>,
535 /// The severity of diagnostic associated with this chunk, if any.
536 pub diagnostic_severity: Option<DiagnosticSeverity>,
537 /// A bitset of which characters are tabs in this string.
538 pub tabs: u128,
539 /// Bitmap of character indices in this chunk
540 pub chars: u128,
541 /// Whether this chunk of text is marked as unnecessary.
542 pub is_unnecessary: bool,
543 /// Whether this chunk of text was originally a tab character.
544 pub is_tab: bool,
545 /// Whether this chunk of text was originally an inlay.
546 pub is_inlay: bool,
547 /// Whether to underline the corresponding text range in the editor.
548 pub underline: bool,
549}
550
551/// A set of edits to a given version of a buffer, computed asynchronously.
552#[derive(Debug)]
553pub struct Diff {
554 pub base_version: clock::Global,
555 pub line_ending: LineEnding,
556 pub edits: Vec<(Range<usize>, Arc<str>)>,
557}
558
559#[derive(Debug, Clone, Copy)]
560pub(crate) struct DiagnosticEndpoint {
561 offset: usize,
562 is_start: bool,
563 underline: bool,
564 severity: DiagnosticSeverity,
565 is_unnecessary: bool,
566}
567
568/// A class of characters, used for characterizing a run of text.
569#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
570pub enum CharKind {
571 /// Whitespace.
572 Whitespace,
573 /// Punctuation.
574 Punctuation,
575 /// Word.
576 Word,
577}
578
579/// Context for character classification within a specific scope.
580#[derive(Copy, Clone, Eq, PartialEq, Debug)]
581pub enum CharScopeContext {
582 /// Character classification for completion queries.
583 ///
584 /// This context treats certain characters as word constituents that would
585 /// normally be considered punctuation, such as '-' in Tailwind classes
586 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
587 Completion,
588 /// Character classification for linked edits.
589 ///
590 /// This context handles characters that should be treated as part of
591 /// identifiers during linked editing operations, such as '.' in JSX
592 /// component names like `<Animated.View>`.
593 LinkedEdit,
594}
595
596/// A runnable is a set of data about a region that could be resolved into a task
597pub struct Runnable {
598 pub tags: SmallVec<[RunnableTag; 1]>,
599 pub language: Arc<Language>,
600 pub buffer: BufferId,
601}
602
603#[derive(Default, Clone, Debug)]
604pub struct HighlightedText {
605 pub text: SharedString,
606 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
607}
608
609#[derive(Default, Debug)]
610struct HighlightedTextBuilder {
611 pub text: String,
612 highlights: Vec<(Range<usize>, HighlightStyle)>,
613}
614
615impl HighlightedText {
616 pub fn from_buffer_range<T: ToOffset>(
617 range: Range<T>,
618 snapshot: &text::BufferSnapshot,
619 syntax_snapshot: &SyntaxSnapshot,
620 override_style: Option<HighlightStyle>,
621 syntax_theme: &SyntaxTheme,
622 ) -> Self {
623 let mut highlighted_text = HighlightedTextBuilder::default();
624 highlighted_text.add_text_from_buffer_range(
625 range,
626 snapshot,
627 syntax_snapshot,
628 override_style,
629 syntax_theme,
630 );
631 highlighted_text.build()
632 }
633
634 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
635 gpui::StyledText::new(self.text.clone())
636 .with_default_highlights(default_style, self.highlights.iter().cloned())
637 }
638
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
641 pub fn first_line_preview(self) -> (Self, bool) {
642 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
643 let first_line = &self.text[..newline_ix];
644
645 // Trim leading whitespace, unless an edit starts prior to it.
646 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
647 if let Some((first_highlight_range, _)) = self.highlights.first() {
648 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
649 }
650
651 let preview_text = &first_line[preview_start_ix..];
652 let preview_highlights = self
653 .highlights
654 .into_iter()
655 .skip_while(|(range, _)| range.end <= preview_start_ix)
656 .take_while(|(range, _)| range.start < newline_ix)
657 .filter_map(|(mut range, highlight)| {
658 range.start = range.start.saturating_sub(preview_start_ix);
659 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
660 if range.is_empty() {
661 None
662 } else {
663 Some((range, highlight))
664 }
665 });
666
667 let preview = Self {
668 text: SharedString::new(preview_text),
669 highlights: preview_highlights.collect(),
670 };
671
672 (preview, self.text.len() > newline_ix)
673 }
674}
675
676impl HighlightedTextBuilder {
677 pub fn build(self) -> HighlightedText {
678 HighlightedText {
679 text: self.text.into(),
680 highlights: self.highlights,
681 }
682 }
683
684 pub fn add_text_from_buffer_range<T: ToOffset>(
685 &mut self,
686 range: Range<T>,
687 snapshot: &text::BufferSnapshot,
688 syntax_snapshot: &SyntaxSnapshot,
689 override_style: Option<HighlightStyle>,
690 syntax_theme: &SyntaxTheme,
691 ) {
692 let range = range.to_offset(snapshot);
693 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
694 let start = self.text.len();
695 self.text.push_str(chunk.text);
696 let end = self.text.len();
697
698 if let Some(highlight_style) = chunk
699 .syntax_highlight_id
700 .and_then(|id| id.style(syntax_theme))
701 {
702 let highlight_style = override_style.map_or(highlight_style, |override_style| {
703 highlight_style.highlight(override_style)
704 });
705 self.highlights.push((start..end, highlight_style));
706 } else if let Some(override_style) = override_style {
707 self.highlights.push((start..end, override_style));
708 }
709 }
710 }
711
712 fn highlighted_chunks<'a>(
713 range: Range<usize>,
714 snapshot: &'a text::BufferSnapshot,
715 syntax_snapshot: &'a SyntaxSnapshot,
716 ) -> BufferChunks<'a> {
717 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
718 grammar
719 .highlights_config
720 .as_ref()
721 .map(|config| &config.query)
722 });
723
724 let highlight_maps = captures
725 .grammars()
726 .iter()
727 .map(|grammar| grammar.highlight_map())
728 .collect();
729
730 BufferChunks::new(
731 snapshot.as_rope(),
732 range,
733 Some((captures, highlight_maps)),
734 false,
735 None,
736 )
737 }
738}
739
740#[derive(Clone)]
741pub struct EditPreview {
742 old_snapshot: text::BufferSnapshot,
743 applied_edits_snapshot: text::BufferSnapshot,
744 syntax_snapshot: SyntaxSnapshot,
745}
746
747impl EditPreview {
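    /// Renders the edits as a unified diff against the pre-edit text, including
    /// a few lines of surrounding context. Returns `None` if `edits` is empty.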
748 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
749 let (first, _) = edits.first()?;
750 let (last, _) = edits.last()?;
751
752 let start = first.start.to_point(&self.old_snapshot);
753 let old_end = last.end.to_point(&self.old_snapshot);
754 let new_end = last
755 .end
756 .bias_right(&self.old_snapshot)
757 .to_point(&self.applied_edits_snapshot);
758
759 let start = Point::new(start.row.saturating_sub(3), 0);
760 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
761 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
762
763 Some(unified_diff(
764 &self
765 .old_snapshot
766 .text_for_range(start..old_end)
767 .collect::<String>(),
768 &self
769 .applied_edits_snapshot
770 .text_for_range(start..new_end)
771 .collect::<String>(),
772 ))
773 }
774
775 pub fn highlight_edits(
776 &self,
777 current_snapshot: &BufferSnapshot,
778 edits: &[(Range<Anchor>, impl AsRef<str>)],
779 include_deletions: bool,
780 cx: &App,
781 ) -> HighlightedText {
782 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
783 return HighlightedText::default();
784 };
785
786 let mut highlighted_text = HighlightedTextBuilder::default();
787
788 let visible_range_in_preview_snapshot =
789 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
790 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
791
792 let insertion_highlight_style = HighlightStyle {
793 background_color: Some(cx.theme().status().created_background),
794 ..Default::default()
795 };
796 let deletion_highlight_style = HighlightStyle {
797 background_color: Some(cx.theme().status().deleted_background),
798 ..Default::default()
799 };
800 let syntax_theme = cx.theme().syntax();
801
802 for (range, edit_text) in edits {
803 let edit_new_end_in_preview_snapshot = range
804 .end
805 .bias_right(&self.old_snapshot)
806 .to_offset(&self.applied_edits_snapshot);
807 let edit_start_in_preview_snapshot =
808 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
809
810 let unchanged_range_in_preview_snapshot =
811 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
812 if !unchanged_range_in_preview_snapshot.is_empty() {
813 highlighted_text.add_text_from_buffer_range(
814 unchanged_range_in_preview_snapshot,
815 &self.applied_edits_snapshot,
816 &self.syntax_snapshot,
817 None,
818 syntax_theme,
819 );
820 }
821
822 let range_in_current_snapshot = range.to_offset(current_snapshot);
823 if include_deletions && !range_in_current_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
828 Some(deletion_highlight_style),
829 syntax_theme,
830 );
831 }
832
833 if !edit_text.as_ref().is_empty() {
834 highlighted_text.add_text_from_buffer_range(
835 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
836 &self.applied_edits_snapshot,
837 &self.syntax_snapshot,
838 Some(insertion_highlight_style),
839 syntax_theme,
840 );
841 }
842
843 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
844 }
845
846 highlighted_text.add_text_from_buffer_range(
847 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
848 &self.applied_edits_snapshot,
849 &self.syntax_snapshot,
850 None,
851 syntax_theme,
852 );
853
854 highlighted_text.build()
855 }
856
857 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
858 cx.new(|cx| {
859 let mut buffer = Buffer::local_normalized(
860 self.applied_edits_snapshot.as_rope().clone(),
861 self.applied_edits_snapshot.line_ending(),
862 cx,
863 );
864 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
865 buffer
866 })
867 }
868
869 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
870 let (first, _) = edits.first()?;
871 let (last, _) = edits.last()?;
872
873 let start = first
874 .start
875 .bias_left(&self.old_snapshot)
876 .to_point(&self.applied_edits_snapshot);
877 let end = last
878 .end
879 .bias_right(&self.old_snapshot)
880 .to_point(&self.applied_edits_snapshot);
881
882 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
883 let range = Point::new(start.row, 0)
884 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
885
886 Some(range)
887 }
888}
889
890#[derive(Clone, Debug, PartialEq, Eq)]
891pub struct BracketMatch<T> {
892 pub open_range: Range<T>,
893 pub close_range: Range<T>,
894 pub newline_only: bool,
895 pub syntax_layer_depth: usize,
896 pub color_index: Option<usize>,
897}
898
899impl<T> BracketMatch<T> {
900 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
901 (self.open_range, self.close_range)
902 }
903}
904
905impl Buffer {
906 /// Create a new buffer with the given base text.
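    ///
    /// A minimal illustrative sketch (not compiled as a doctest); assumes a
    /// `gpui` context `cx` that can construct entities:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```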
907 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
908 Self::build(
909 TextBuffer::new(
910 ReplicaId::LOCAL,
911 cx.entity_id().as_non_zero_u64().into(),
912 base_text.into(),
913 ),
914 None,
915 Capability::ReadWrite,
916 )
917 }
918
919 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
920 pub fn local_normalized(
921 base_text_normalized: Rope,
922 line_ending: LineEnding,
923 cx: &Context<Self>,
924 ) -> Self {
925 Self::build(
926 TextBuffer::new_normalized(
927 ReplicaId::LOCAL,
928 cx.entity_id().as_non_zero_u64().into(),
929 line_ending,
930 base_text_normalized,
931 ),
932 None,
933 Capability::ReadWrite,
934 )
935 }
936
937 /// Create a new buffer that is a replica of a remote buffer.
938 pub fn remote(
939 remote_id: BufferId,
940 replica_id: ReplicaId,
941 capability: Capability,
942 base_text: impl Into<String>,
943 ) -> Self {
944 Self::build(
945 TextBuffer::new(replica_id, remote_id, base_text.into()),
946 None,
947 capability,
948 )
949 }
950
951 /// Create a new buffer that is a replica of a remote buffer, populating its
952 /// state from the given protobuf message.
953 pub fn from_proto(
954 replica_id: ReplicaId,
955 capability: Capability,
956 message: proto::BufferState,
957 file: Option<Arc<dyn File>>,
958 ) -> Result<Self> {
959 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
960 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
961 let mut this = Self::build(buffer, file, capability);
962 this.text.set_line_ending(proto::deserialize_line_ending(
963 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
964 ));
965 this.saved_version = proto::deserialize_version(&message.saved_version);
966 this.saved_mtime = message.saved_mtime.map(|time| time.into());
967 Ok(this)
968 }
969
970 /// Serialize the buffer's state to a protobuf message.
971 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
972 proto::BufferState {
973 id: self.remote_id().into(),
974 file: self.file.as_ref().map(|f| f.to_proto(cx)),
975 base_text: self.base_text().to_string(),
976 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
977 saved_version: proto::serialize_version(&self.saved_version),
978 saved_mtime: self.saved_mtime.map(|time| time.into()),
979 }
980 }
981
982 /// Serialize as protobufs all of the changes to the buffer since the given version.
983 pub fn serialize_ops(
984 &self,
985 since: Option<clock::Global>,
986 cx: &App,
987 ) -> Task<Vec<proto::Operation>> {
988 let mut operations = Vec::new();
989 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
990
991 operations.extend(self.remote_selections.iter().map(|(_, set)| {
992 proto::serialize_operation(&Operation::UpdateSelections {
993 selections: set.selections.clone(),
994 lamport_timestamp: set.lamport_timestamp,
995 line_mode: set.line_mode,
996 cursor_shape: set.cursor_shape,
997 })
998 }));
999
1000 for (server_id, diagnostics) in &self.diagnostics {
1001 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1002 lamport_timestamp: self.diagnostics_timestamp,
1003 server_id: *server_id,
1004 diagnostics: diagnostics.iter().cloned().collect(),
1005 }));
1006 }
1007
1008 for (server_id, completions) in &self.completion_triggers_per_language_server {
1009 operations.push(proto::serialize_operation(
1010 &Operation::UpdateCompletionTriggers {
1011 triggers: completions.iter().cloned().collect(),
1012 lamport_timestamp: self.completion_triggers_timestamp,
1013 server_id: *server_id,
1014 },
1015 ));
1016 }
1017
1018 let text_operations = self.text.operations().clone();
1019 cx.background_spawn(async move {
1020 let since = since.unwrap_or_default();
1021 operations.extend(
1022 text_operations
1023 .iter()
1024 .filter(|(_, op)| !since.observed(op.timestamp()))
1025 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1026 );
1027 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1028 operations
1029 })
1030 }
1031
1032 /// Assign a language to the buffer, returning the buffer.
1033 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1034 self.set_language_async(Some(language), cx);
1035 self
1036 }
1037
    /// Assigns a language to the buffer, blocking for up to 1ms to reparse it, and returns the buffer.
1039 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1040 self.set_language(Some(language), cx);
1041 self
1042 }
1043
1044 /// Returns the [`Capability`] of this buffer.
1045 pub fn capability(&self) -> Capability {
1046 self.capability
1047 }
1048
1049 /// Whether this buffer can only be read.
1050 pub fn read_only(&self) -> bool {
1051 self.capability == Capability::ReadOnly
1052 }
1053
1054 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1055 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1056 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1057 let snapshot = buffer.snapshot();
1058 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1059 let tree_sitter_data = TreeSitterData::new(snapshot);
1060 Self {
1061 saved_mtime,
1062 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1063 saved_version: buffer.version(),
1064 preview_version: buffer.version(),
1065 reload_task: None,
1066 transaction_depth: 0,
1067 was_dirty_before_starting_transaction: None,
1068 has_unsaved_edits: Cell::new((buffer.version(), false)),
1069 text: buffer,
1070 branch_state: None,
1071 file,
1072 capability,
1073 syntax_map,
1074 reparse: None,
1075 non_text_state_update_count: 0,
1076 sync_parse_timeout: Duration::from_millis(1),
1077 parse_status: watch::channel(ParseStatus::Idle),
1078 autoindent_requests: Default::default(),
1079 wait_for_autoindent_txs: Default::default(),
1080 pending_autoindent: Default::default(),
1081 language: None,
1082 remote_selections: Default::default(),
1083 diagnostics: Default::default(),
1084 diagnostics_timestamp: Lamport::MIN,
1085 completion_triggers: Default::default(),
1086 completion_triggers_per_language_server: Default::default(),
1087 completion_triggers_timestamp: Lamport::MIN,
1088 deferred_ops: OperationQueue::new(),
1089 has_conflict: false,
1090 change_bits: Default::default(),
1091 _subscriptions: Vec::new(),
1092 }
1093 }
1094
1095 pub fn build_snapshot(
1096 text: Rope,
1097 language: Option<Arc<Language>>,
1098 language_registry: Option<Arc<LanguageRegistry>>,
1099 cx: &mut App,
1100 ) -> impl Future<Output = BufferSnapshot> + use<> {
1101 let entity_id = cx.reserve_entity::<Self>().entity_id();
1102 let buffer_id = entity_id.as_non_zero_u64().into();
1103 async move {
1104 let text =
1105 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1106 .snapshot();
1107 let mut syntax = SyntaxMap::new(&text).snapshot();
1108 if let Some(language) = language.clone() {
1109 let language_registry = language_registry.clone();
1110 syntax.reparse(&text, language_registry, language);
1111 }
1112 let tree_sitter_data = TreeSitterData::new(text.clone());
1113 BufferSnapshot {
1114 text,
1115 syntax,
1116 file: None,
1117 diagnostics: Default::default(),
1118 remote_selections: Default::default(),
1119 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1120 language,
1121 non_text_state_update_count: 0,
1122 }
1123 }
1124 }
1125
1126 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1127 let entity_id = cx.reserve_entity::<Self>().entity_id();
1128 let buffer_id = entity_id.as_non_zero_u64().into();
1129 let text = TextBuffer::new_normalized(
1130 ReplicaId::LOCAL,
1131 buffer_id,
1132 Default::default(),
1133 Rope::new(),
1134 )
1135 .snapshot();
1136 let syntax = SyntaxMap::new(&text).snapshot();
1137 let tree_sitter_data = TreeSitterData::new(text.clone());
1138 BufferSnapshot {
1139 text,
1140 syntax,
1141 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1142 file: None,
1143 diagnostics: Default::default(),
1144 remote_selections: Default::default(),
1145 language: None,
1146 non_text_state_update_count: 0,
1147 }
1148 }
1149
1150 #[cfg(any(test, feature = "test-support"))]
1151 pub fn build_snapshot_sync(
1152 text: Rope,
1153 language: Option<Arc<Language>>,
1154 language_registry: Option<Arc<LanguageRegistry>>,
1155 cx: &mut App,
1156 ) -> BufferSnapshot {
1157 let entity_id = cx.reserve_entity::<Self>().entity_id();
1158 let buffer_id = entity_id.as_non_zero_u64().into();
1159 let text =
1160 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1161 .snapshot();
1162 let mut syntax = SyntaxMap::new(&text).snapshot();
1163 if let Some(language) = language.clone() {
1164 syntax.reparse(&text, language_registry, language);
1165 }
1166 let tree_sitter_data = TreeSitterData::new(text.clone());
1167 BufferSnapshot {
1168 text,
1169 syntax,
1170 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1171 file: None,
1172 diagnostics: Default::default(),
1173 remote_selections: Default::default(),
1174 language,
1175 non_text_state_update_count: 0,
1176 }
1177 }
1178
1179 /// Retrieve a snapshot of the buffer's current state. This is computationally
1180 /// cheap, and allows reading from the buffer on a background thread.
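    ///
    /// A minimal illustrative sketch (not compiled as a doctest); assumes an
    /// `Entity<Buffer>` named `buffer` and an `App` context `cx` are in scope:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is an immutable, self-contained view of the buffer.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```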
1181 pub fn snapshot(&self) -> BufferSnapshot {
1182 let text = self.text.snapshot();
1183 let mut syntax_map = self.syntax_map.lock();
1184 syntax_map.interpolate(&text);
1185 let syntax = syntax_map.snapshot();
1186
1187 BufferSnapshot {
1188 text,
1189 syntax,
1190 tree_sitter_data: self.tree_sitter_data.clone(),
1191 file: self.file.clone(),
1192 remote_selections: self.remote_selections.clone(),
1193 diagnostics: self.diagnostics.clone(),
1194 language: self.language.clone(),
1195 non_text_state_update_count: self.non_text_state_update_count,
1196 }
1197 }
1198
1199 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1200 let this = cx.entity();
1201 cx.new(|cx| {
1202 let mut branch = Self {
1203 branch_state: Some(BufferBranchState {
1204 base_buffer: this.clone(),
1205 merged_operations: Default::default(),
1206 }),
1207 language: self.language.clone(),
1208 has_conflict: self.has_conflict,
1209 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1210 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1211 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1212 };
1213 if let Some(language_registry) = self.language_registry() {
1214 branch.set_language_registry(language_registry);
1215 }
1216
1217 // Reparse the branch buffer so that we get syntax highlighting immediately.
1218 branch.reparse(cx, true);
1219
1220 branch
1221 })
1222 }
1223
1224 pub fn preview_edits(
1225 &self,
1226 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1227 cx: &App,
1228 ) -> Task<EditPreview> {
1229 let registry = self.language_registry();
1230 let language = self.language().cloned();
1231 let old_snapshot = self.text.snapshot();
1232 let mut branch_buffer = self.text.branch();
1233 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1234 cx.background_spawn(async move {
1235 if !edits.is_empty() {
1236 if let Some(language) = language.clone() {
1237 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1238 }
1239
1240 branch_buffer.edit(edits.iter().cloned());
1241 let snapshot = branch_buffer.snapshot();
1242 syntax_snapshot.interpolate(&snapshot);
1243
1244 if let Some(language) = language {
1245 syntax_snapshot.reparse(&snapshot, registry, language);
1246 }
1247 }
1248 EditPreview {
1249 old_snapshot,
1250 applied_edits_snapshot: branch_buffer.snapshot(),
1251 syntax_snapshot,
1252 }
1253 })
1254 }
1255
1256 /// Applies all of the changes in this buffer that intersect any of the
1257 /// given `ranges` to its base buffer.
1258 ///
1259 /// If `ranges` is empty, then all changes will be applied. This buffer must
1260 /// be a branch buffer to call this method.
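    ///
    /// A minimal illustrative sketch (not compiled as a doctest); assumes
    /// `branch` was created via [`Buffer::branch`]:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "inserted text")], None, cx);
    ///     // An empty `ranges` vector merges every change into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```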
1261 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1262 let Some(base_buffer) = self.base_buffer() else {
1263 debug_panic!("not a branch buffer");
1264 return;
1265 };
1266
1267 let mut ranges = if ranges.is_empty() {
1268 &[0..usize::MAX]
1269 } else {
1270 ranges.as_slice()
1271 }
1272 .iter()
1273 .peekable();
1274
1275 let mut edits = Vec::new();
1276 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1277 let mut is_included = false;
1278 while let Some(range) = ranges.peek() {
1279 if range.end < edit.new.start {
1280 ranges.next().unwrap();
1281 } else {
1282 if range.start <= edit.new.end {
1283 is_included = true;
1284 }
1285 break;
1286 }
1287 }
1288
1289 if is_included {
1290 edits.push((
1291 edit.old.clone(),
1292 self.text_for_range(edit.new.clone()).collect::<String>(),
1293 ));
1294 }
1295 }
1296
1297 let operation = base_buffer.update(cx, |base_buffer, cx| {
1298 // cx.emit(BufferEvent::DiffBaseChanged);
1299 base_buffer.edit(edits, None, cx)
1300 });
1301
1302 if let Some(operation) = operation
1303 && let Some(BufferBranchState {
1304 merged_operations, ..
1305 }) = &mut self.branch_state
1306 {
1307 merged_operations.push(operation);
1308 }
1309 }
1310
1311 fn on_base_buffer_event(
1312 &mut self,
1313 _: Entity<Buffer>,
1314 event: &BufferEvent,
1315 cx: &mut Context<Self>,
1316 ) {
1317 let BufferEvent::Operation { operation, .. } = event else {
1318 return;
1319 };
1320 let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 else {
1324 return;
1325 };
1326
1327 let mut operation_to_undo = None;
1328 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1329 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1330 {
1331 merged_operations.remove(ix);
1332 operation_to_undo = Some(operation.timestamp);
1333 }
1334
1335 self.apply_ops([operation.clone()], cx);
1336
1337 if let Some(timestamp) = operation_to_undo {
1338 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1339 self.undo_operations(counts, cx);
1340 }
1341 }
1342
1343 #[cfg(test)]
1344 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1345 &self.text
1346 }
1347
1348 /// Retrieve a snapshot of the buffer's raw text, without any
1349 /// language-related state like the syntax tree or diagnostics.
1350 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1351 self.text.snapshot()
1352 }
1353
1354 /// The file associated with the buffer, if any.
1355 pub fn file(&self) -> Option<&Arc<dyn File>> {
1356 self.file.as_ref()
1357 }
1358
1359 /// The version of the buffer that was last saved or reloaded from disk.
1360 pub fn saved_version(&self) -> &clock::Global {
1361 &self.saved_version
1362 }
1363
1364 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1365 pub fn saved_mtime(&self) -> Option<MTime> {
1366 self.saved_mtime
1367 }
1368
1369 /// Assign a language to the buffer.
1370 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1371 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1372 }
1373
1374 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1375 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1376 self.set_language_(language, true, cx);
1377 }
1378
1379 fn set_language_(
1380 &mut self,
1381 language: Option<Arc<Language>>,
1382 may_block: bool,
1383 cx: &mut Context<Self>,
1384 ) {
1385 self.non_text_state_update_count += 1;
1386 self.syntax_map.lock().clear(&self.text);
1387 self.language = language;
1388 self.was_changed();
1389 self.reparse(cx, may_block);
1390 cx.emit(BufferEvent::LanguageChanged);
1391 }
1392
1393 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1394 /// other languages if parts of the buffer are written in different languages.
1395 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1396 self.syntax_map
1397 .lock()
1398 .set_language_registry(language_registry);
1399 }
1400
1401 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1402 self.syntax_map.lock().language_registry()
1403 }
1404
1405 /// Assign the line ending type to the buffer.
1406 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1407 self.text.set_line_ending(line_ending);
1408
1409 let lamport_timestamp = self.text.lamport_clock.tick();
1410 self.send_operation(
1411 Operation::UpdateLineEnding {
1412 line_ending,
1413 lamport_timestamp,
1414 },
1415 true,
1416 cx,
1417 );
1418 }
1419
1420 /// Assign the buffer a new [`Capability`].
1421 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1422 if self.capability != capability {
1423 self.capability = capability;
1424 cx.emit(BufferEvent::CapabilityChanged)
1425 }
1426 }
1427
1428 /// This method is called to signal that the buffer has been saved.
1429 pub fn did_save(
1430 &mut self,
1431 version: clock::Global,
1432 mtime: Option<MTime>,
1433 cx: &mut Context<Self>,
1434 ) {
1435 self.saved_version = version.clone();
1436 self.has_unsaved_edits.set((version, false));
1437 self.has_conflict = false;
1438 self.saved_mtime = mtime;
1439 self.was_changed();
1440 cx.emit(BufferEvent::Saved);
1441 cx.notify();
1442 }
1443
1444 /// Reloads the contents of the buffer from disk.
1445 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1446 let (tx, rx) = futures::channel::oneshot::channel();
1447 let prev_version = self.text.version();
1448 self.reload_task = Some(cx.spawn(async move |this, cx| {
1449 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1450 let file = this.file.as_ref()?.as_local()?;
1451
1452 Some((file.disk_state().mtime(), file.load(cx)))
1453 })?
1454 else {
1455 return Ok(());
1456 };
1457
1458 let new_text = new_text.await?;
1459 let diff = this
1460 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1461 .await;
1462 this.update(cx, |this, cx| {
1463 if this.version() == diff.base_version {
1464 this.finalize_last_transaction();
1465 this.apply_diff(diff, cx);
1466 tx.send(this.finalize_last_transaction().cloned()).ok();
1467 this.has_conflict = false;
1468 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1469 } else {
1470 if !diff.edits.is_empty()
1471 || this
1472 .edits_since::<usize>(&diff.base_version)
1473 .next()
1474 .is_some()
1475 {
1476 this.has_conflict = true;
1477 }
1478
1479 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1480 }
1481
1482 this.reload_task.take();
1483 })
1484 }));
1485 rx
1486 }
1487
1488 /// This method is called to signal that the buffer has been reloaded.
1489 pub fn did_reload(
1490 &mut self,
1491 version: clock::Global,
1492 line_ending: LineEnding,
1493 mtime: Option<MTime>,
1494 cx: &mut Context<Self>,
1495 ) {
1496 self.saved_version = version;
1497 self.has_unsaved_edits
1498 .set((self.saved_version.clone(), false));
1499 self.text.set_line_ending(line_ending);
1500 self.saved_mtime = mtime;
1501 cx.emit(BufferEvent::Reloaded);
1502 cx.notify();
1503 }
1504
1505 /// Updates the [`File`] backing this buffer. This should be called when
1506 /// the file has changed or has been deleted.
1507 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1508 let was_dirty = self.is_dirty();
1509 let mut file_changed = false;
1510
1511 if let Some(old_file) = self.file.as_ref() {
1512 if new_file.path() != old_file.path() {
1513 file_changed = true;
1514 }
1515
1516 let old_state = old_file.disk_state();
1517 let new_state = new_file.disk_state();
1518 if old_state != new_state {
1519 file_changed = true;
1520 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1521 cx.emit(BufferEvent::ReloadNeeded)
1522 }
1523 }
1524 } else {
1525 file_changed = true;
1526 };
1527
1528 self.file = Some(new_file);
1529 if file_changed {
1530 self.was_changed();
1531 self.non_text_state_update_count += 1;
1532 if was_dirty != self.is_dirty() {
1533 cx.emit(BufferEvent::DirtyChanged);
1534 }
1535 cx.emit(BufferEvent::FileHandleChanged);
1536 cx.notify();
1537 }
1538 }
1539
1540 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1541 Some(self.branch_state.as_ref()?.base_buffer.clone())
1542 }
1543
1544 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1545 pub fn language(&self) -> Option<&Arc<Language>> {
1546 self.language.as_ref()
1547 }
1548
1549 /// Returns the [`Language`] at the given location.
1550 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1551 let offset = position.to_offset(self);
1552 let mut is_first = true;
1553 let start_anchor = self.anchor_before(offset);
1554 let end_anchor = self.anchor_after(offset);
1555 self.syntax_map
1556 .lock()
1557 .layers_for_range(offset..offset, &self.text, false)
1558 .filter(|layer| {
1559 if is_first {
1560 is_first = false;
1561 return true;
1562 }
1563
1564 layer
1565 .included_sub_ranges
1566 .map(|sub_ranges| {
1567 sub_ranges.iter().any(|sub_range| {
1568 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1569 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1570 !is_before_start && !is_after_end
1571 })
1572 })
1573 .unwrap_or(true)
1574 })
1575 .last()
1576 .map(|info| info.language.clone())
1577 .or_else(|| self.language.clone())
1578 }
1579
1580 /// Returns each [`Language`] for the active syntax layers at the given location.
1581 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1582 let offset = position.to_offset(self);
1583 let mut languages: Vec<Arc<Language>> = self
1584 .syntax_map
1585 .lock()
1586 .layers_for_range(offset..offset, &self.text, false)
1587 .map(|info| info.language.clone())
1588 .collect();
1589
1590 if languages.is_empty()
1591 && let Some(buffer_language) = self.language()
1592 {
1593 languages.push(buffer_language.clone());
1594 }
1595
1596 languages
1597 }
1598
1599 /// An integer version number that accounts for all updates besides
1600 /// the buffer's text itself (which is versioned via a version vector).
1601 pub fn non_text_state_update_count(&self) -> usize {
1602 self.non_text_state_update_count
1603 }
1604
1605 /// Whether the buffer is being parsed in the background.
1606 #[cfg(any(test, feature = "test-support"))]
1607 pub fn is_parsing(&self) -> bool {
1608 self.reparse.is_some()
1609 }
1610
1611 /// Indicates whether the buffer contains any regions that may be
1612 /// written in a language that hasn't been loaded yet.
1613 pub fn contains_unknown_injections(&self) -> bool {
1614 self.syntax_map.lock().contains_unknown_injections()
1615 }
1616
1617 #[cfg(any(test, feature = "test-support"))]
1618 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1619 self.sync_parse_timeout = timeout;
1620 }
1621
1622 /// Called after an edit to synchronize the buffer's main parse tree with
1623 /// the buffer's new underlying state.
1624 ///
1625 /// Locks the syntax map and interpolates the edits since the last reparse
1626 /// into the foreground syntax tree.
1627 ///
1628 /// Then takes a stable snapshot of the syntax map before unlocking it.
1629 /// The snapshot with the interpolated edits is sent to a background thread,
1630 /// where we ask Tree-sitter to perform an incremental parse.
1631 ///
1632 /// Meanwhile, in the foreground if `may_block` is true, we block the main
1633 /// thread for up to 1ms waiting on the parse to complete. As soon as it
1634 /// completes, we proceed synchronously, unless a 1ms timeout elapses.
1635 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call back
    /// into the main thread and assign its results to the foreground parse state.
1640 ///
1641 /// If the buffer or grammar changed since the start of the background parse,
1642 /// initiate an additional reparse recursively. To avoid concurrent parses
1643 /// for the same buffer, we only initiate a new parse if we are not already
1644 /// parsing in the background.
1645 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1646 if self.reparse.is_some() {
1647 return;
1648 }
1649 let language = if let Some(language) = self.language.clone() {
1650 language
1651 } else {
1652 return;
1653 };
1654
1655 let text = self.text_snapshot();
1656 let parsed_version = self.version();
1657
1658 let mut syntax_map = self.syntax_map.lock();
1659 syntax_map.interpolate(&text);
1660 let language_registry = syntax_map.language_registry();
1661 let mut syntax_snapshot = syntax_map.snapshot();
1662 drop(syntax_map);
1663
1664 let parse_task = cx.background_spawn({
1665 let language = language.clone();
1666 let language_registry = language_registry.clone();
1667 async move {
1668 syntax_snapshot.reparse(&text, language_registry, language);
1669 syntax_snapshot
1670 }
1671 });
1672
1673 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1674 if may_block {
1675 match cx
1676 .background_executor()
1677 .block_with_timeout(self.sync_parse_timeout, parse_task)
1678 {
1679 Ok(new_syntax_snapshot) => {
1680 self.did_finish_parsing(new_syntax_snapshot, cx);
1681 self.reparse = None;
1682 }
1683 Err(parse_task) => {
1684 self.reparse = Some(cx.spawn(async move |this, cx| {
1685 let new_syntax_map = cx.background_spawn(parse_task).await;
1686 this.update(cx, move |this, cx| {
1687 let grammar_changed = || {
1688 this.language.as_ref().is_none_or(|current_language| {
1689 !Arc::ptr_eq(&language, current_language)
1690 })
1691 };
1692 let language_registry_changed = || {
1693 new_syntax_map.contains_unknown_injections()
1694 && language_registry.is_some_and(|registry| {
1695 registry.version()
1696 != new_syntax_map.language_registry_version()
1697 })
1698 };
1699 let parse_again = this.version.changed_since(&parsed_version)
1700 || language_registry_changed()
1701 || grammar_changed();
1702 this.did_finish_parsing(new_syntax_map, cx);
1703 this.reparse = None;
1704 if parse_again {
1705 this.reparse(cx, false);
1706 }
1707 })
1708 .ok();
1709 }));
1710 }
1711 }
1712 } else {
1713 self.reparse = Some(cx.spawn(async move |this, cx| {
1714 let new_syntax_map = cx.background_spawn(parse_task).await;
1715 this.update(cx, move |this, cx| {
1716 let grammar_changed = || {
1717 this.language.as_ref().is_none_or(|current_language| {
1718 !Arc::ptr_eq(&language, current_language)
1719 })
1720 };
1721 let language_registry_changed = || {
1722 new_syntax_map.contains_unknown_injections()
1723 && language_registry.is_some_and(|registry| {
1724 registry.version() != new_syntax_map.language_registry_version()
1725 })
1726 };
1727 let parse_again = this.version.changed_since(&parsed_version)
1728 || language_registry_changed()
1729 || grammar_changed();
1730 this.did_finish_parsing(new_syntax_map, cx);
1731 this.reparse = None;
1732 if parse_again {
1733 this.reparse(cx, false);
1734 }
1735 })
1736 .ok();
1737 }));
1738 }
1739 }
1740
1741 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1742 self.was_changed();
1743 self.non_text_state_update_count += 1;
1744 self.syntax_map.lock().did_parse(syntax_snapshot);
1745 self.request_autoindent(cx);
1746 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1747 self.tree_sitter_data.lock().clear();
1748 cx.emit(BufferEvent::Reparsed);
1749 cx.notify();
1750 }
1751
1752 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1753 self.parse_status.1.clone()
1754 }
1755
    /// Waits until the buffer is no longer parsing.
1757 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1758 let mut parse_status = self.parse_status();
1759 async move {
1760 while *parse_status.borrow() != ParseStatus::Idle {
1761 if parse_status.changed().await.is_err() {
1762 break;
1763 }
1764 }
1765 }
1766 }
1767
1768 /// Assign to the buffer a set of diagnostics created by a given language server.
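    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest); `server_id` and the sorted
    /// `entries` (an iterator of `DiagnosticEntry<Anchor>`) are assumed to be
    /// in scope:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     let snapshot = buffer.snapshot();
    ///     let set = DiagnosticSet::from_sorted_entries(entries, &snapshot);
    ///     buffer.update_diagnostics(server_id, set, cx);
    /// });
    /// ```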
1769 pub fn update_diagnostics(
1770 &mut self,
1771 server_id: LanguageServerId,
1772 diagnostics: DiagnosticSet,
1773 cx: &mut Context<Self>,
1774 ) {
1775 let lamport_timestamp = self.text.lamport_clock.tick();
1776 let op = Operation::UpdateDiagnostics {
1777 server_id,
1778 diagnostics: diagnostics.iter().cloned().collect(),
1779 lamport_timestamp,
1780 };
1781
1782 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1783 self.send_operation(op, true, cx);
1784 }
1785
1786 pub fn buffer_diagnostics(
1787 &self,
1788 for_server: Option<LanguageServerId>,
1789 ) -> Vec<&DiagnosticEntry<Anchor>> {
1790 match for_server {
1791 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1792 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1793 Err(_) => Vec::new(),
1794 },
1795 None => self
1796 .diagnostics
1797 .iter()
1798 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1799 .collect(),
1800 }
1801 }
1802
1803 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1804 if let Some(indent_sizes) = self.compute_autoindents() {
1805 let indent_sizes = cx.background_spawn(indent_sizes);
1806 match cx
1807 .background_executor()
1808 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1809 {
1810 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1811 Err(indent_sizes) => {
1812 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1813 let indent_sizes = indent_sizes.await;
1814 this.update(cx, |this, cx| {
1815 this.apply_autoindents(indent_sizes, cx);
1816 })
1817 .ok();
1818 }));
1819 }
1820 }
1821 } else {
1822 self.autoindent_requests.clear();
1823 for tx in self.wait_for_autoindent_txs.drain(..) {
1824 tx.send(()).ok();
1825 }
1826 }
1827 }
1828
1829 fn compute_autoindents(
1830 &self,
1831 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1832 let max_rows_between_yields = 100;
1833 let snapshot = self.snapshot();
1834 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1835 return None;
1836 }
1837
1838 let autoindent_requests = self.autoindent_requests.clone();
1839 Some(async move {
1840 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1841 for request in autoindent_requests {
1842 // Resolve each edited range to its row in the current buffer and in the
1843 // buffer before this batch of edits.
1844 let mut row_ranges = Vec::new();
1845 let mut old_to_new_rows = BTreeMap::new();
1846 let mut language_indent_sizes_by_new_row = Vec::new();
1847 for entry in &request.entries {
1848 let position = entry.range.start;
1849 let new_row = position.to_point(&snapshot).row;
1850 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1851 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1852
1853 if !entry.first_line_is_new {
1854 let old_row = position.to_point(&request.before_edit).row;
1855 old_to_new_rows.insert(old_row, new_row);
1856 }
1857 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1858 }
1859
1860 // Build a map containing the suggested indentation for each of the edited lines
1861 // with respect to the state of the buffer before these edits. This map is keyed
1862 // by the rows for these lines in the current state of the buffer.
1863 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1864 let old_edited_ranges =
1865 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1866 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1867 let mut language_indent_size = IndentSize::default();
1868 for old_edited_range in old_edited_ranges {
1869 let suggestions = request
1870 .before_edit
1871 .suggest_autoindents(old_edited_range.clone())
1872 .into_iter()
1873 .flatten();
1874 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1875 if let Some(suggestion) = suggestion {
1876 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1877
1878 // Find the indent size based on the language for this row.
1879 while let Some((row, size)) = language_indent_sizes.peek() {
1880 if *row > new_row {
1881 break;
1882 }
1883 language_indent_size = *size;
1884 language_indent_sizes.next();
1885 }
1886
1887 let suggested_indent = old_to_new_rows
1888 .get(&suggestion.basis_row)
1889 .and_then(|from_row| {
1890 Some(old_suggestions.get(from_row).copied()?.0)
1891 })
1892 .unwrap_or_else(|| {
1893 request
1894 .before_edit
1895 .indent_size_for_line(suggestion.basis_row)
1896 })
1897 .with_delta(suggestion.delta, language_indent_size);
1898 old_suggestions
1899 .insert(new_row, (suggested_indent, suggestion.within_error));
1900 }
1901 }
1902 yield_now().await;
1903 }
1904
1905 // Compute new suggestions for each line, but only include them in the result
1906 // if they differ from the old suggestion for that line.
1907 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1908 let mut language_indent_size = IndentSize::default();
1909 for (row_range, original_indent_column) in row_ranges {
1910 let new_edited_row_range = if request.is_block_mode {
1911 row_range.start..row_range.start + 1
1912 } else {
1913 row_range.clone()
1914 };
1915
1916 let suggestions = snapshot
1917 .suggest_autoindents(new_edited_row_range.clone())
1918 .into_iter()
1919 .flatten();
1920 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1921 if let Some(suggestion) = suggestion {
1922 // Find the indent size based on the language for this row.
1923 while let Some((row, size)) = language_indent_sizes.peek() {
1924 if *row > new_row {
1925 break;
1926 }
1927 language_indent_size = *size;
1928 language_indent_sizes.next();
1929 }
1930
1931 let suggested_indent = indent_sizes
1932 .get(&suggestion.basis_row)
1933 .copied()
1934 .map(|e| e.0)
1935 .unwrap_or_else(|| {
1936 snapshot.indent_size_for_line(suggestion.basis_row)
1937 })
1938 .with_delta(suggestion.delta, language_indent_size);
1939
1940 if old_suggestions.get(&new_row).is_none_or(
1941 |(old_indentation, was_within_error)| {
1942 suggested_indent != *old_indentation
1943 && (!suggestion.within_error || *was_within_error)
1944 },
1945 ) {
1946 indent_sizes.insert(
1947 new_row,
1948 (suggested_indent, request.ignore_empty_lines),
1949 );
1950 }
1951 }
1952 }
1953
1954 if let (true, Some(original_indent_column)) =
1955 (request.is_block_mode, original_indent_column)
1956 {
1957 let new_indent =
1958 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1959 *indent
1960 } else {
1961 snapshot.indent_size_for_line(row_range.start)
1962 };
1963 let delta = new_indent.len as i64 - original_indent_column as i64;
1964 if delta != 0 {
1965 for row in row_range.skip(1) {
1966 indent_sizes.entry(row).or_insert_with(|| {
1967 let mut size = snapshot.indent_size_for_line(row);
1968 if size.kind == new_indent.kind {
1969 match delta.cmp(&0) {
1970 Ordering::Greater => size.len += delta as u32,
1971 Ordering::Less => {
1972 size.len = size.len.saturating_sub(-delta as u32)
1973 }
1974 Ordering::Equal => {}
1975 }
1976 }
1977 (size, request.ignore_empty_lines)
1978 });
1979 }
1980 }
1981 }
1982
1983 yield_now().await;
1984 }
1985 }
1986
1987 indent_sizes
1988 .into_iter()
1989 .filter_map(|(row, (indent, ignore_empty_lines))| {
1990 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1991 None
1992 } else {
1993 Some((row, indent))
1994 }
1995 })
1996 .collect()
1997 })
1998 }
1999
2000 fn apply_autoindents(
2001 &mut self,
2002 indent_sizes: BTreeMap<u32, IndentSize>,
2003 cx: &mut Context<Self>,
2004 ) {
2005 self.autoindent_requests.clear();
2006 for tx in self.wait_for_autoindent_txs.drain(..) {
2007 tx.send(()).ok();
2008 }
2009
2010 let edits: Vec<_> = indent_sizes
2011 .into_iter()
2012 .filter_map(|(row, indent_size)| {
2013 let current_size = indent_size_for_line(self, row);
2014 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2015 })
2016 .collect();
2017
2018 let preserve_preview = self.preserve_preview();
2019 self.edit(edits, None, cx);
2020 if preserve_preview {
2021 self.refresh_preview();
2022 }
2023 }
2024
2025 /// Create a minimal edit that will cause the given row to be indented
2026 /// with the given size. After applying this edit, the length of the line
2027 /// will always be at least `new_size.len`.
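    ///
    /// # Examples
    ///
    /// A small illustration (not compiled as a doctest): growing a two-space
    /// indent to four spaces on row 3 yields an insertion at the start of
    /// that row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```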
2028 pub fn edit_for_indent_size_adjustment(
2029 row: u32,
2030 current_size: IndentSize,
2031 new_size: IndentSize,
2032 ) -> Option<(Range<Point>, String)> {
2033 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2035 Ordering::Greater => {
2036 let point = Point::new(row, 0);
2037 Some((
2038 point..point,
2039 iter::repeat(new_size.char())
2040 .take((new_size.len - current_size.len) as usize)
2041 .collect::<String>(),
2042 ))
2043 }
2044
2045 Ordering::Less => Some((
2046 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2047 String::new(),
2048 )),
2049
2050 Ordering::Equal => None,
2051 }
2052 } else {
2053 Some((
2054 Point::new(row, 0)..Point::new(row, current_size.len),
2055 iter::repeat(new_size.char())
2056 .take(new_size.len as usize)
2057 .collect::<String>(),
2058 ))
2059 }
2060 }
2061
2062 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2063 /// and the given new text.
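    ///
    /// # Examples
    ///
    /// A schematic sketch (not compiled as a doctest), computing a diff in the
    /// background and applying it once ready; `new_text` is an assumed `String`
    /// and `cx` an `AsyncApp` handle inside an async task:
    ///
    /// ```ignore
    /// let diff = buffer
    ///     .update(cx, |buffer, cx| buffer.diff(new_text, cx))?
    ///     .await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Hunks that conflict with edits made in the meantime are discarded.
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```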
2064 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2065 let old_text = self.as_rope().clone();
2066 let base_version = self.version();
2067 cx.background_executor()
2068 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2069 let old_text = old_text.to_string();
2070 let line_ending = LineEnding::detect(&new_text);
2071 LineEnding::normalize(&mut new_text);
2072 let edits = text_diff(&old_text, &new_text);
2073 Diff {
2074 base_version,
2075 line_ending,
2076 edits,
2077 }
2078 })
2079 }
2080
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
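    ///
    /// # Examples
    ///
    /// A schematic sketch (not compiled as a doctest), following the same
    /// pattern as [`Buffer::diff`], inside an async task with an `AsyncApp`
    /// handle `cx`:
    ///
    /// ```ignore
    /// let diff = buffer
    ///     .update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))?
    ///     .await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// })?;
    /// ```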
2083 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2084 let old_text = self.as_rope().clone();
2085 let line_ending = self.line_ending();
2086 let base_version = self.version();
2087 cx.background_spawn(async move {
2088 let ranges = trailing_whitespace_ranges(&old_text);
2089 let empty = Arc::<str>::from("");
2090 Diff {
2091 base_version,
2092 line_ending,
2093 edits: ranges
2094 .into_iter()
2095 .map(|range| (range, empty.clone()))
2096 .collect(),
2097 }
2098 })
2099 }
2100
2101 /// Ensures that the buffer ends with a single newline character, and
2102 /// no other whitespace. Skips if the buffer is empty.
2103 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2104 let len = self.len();
2105 if len == 0 {
2106 return;
2107 }
2108 let mut offset = len;
2109 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2110 let non_whitespace_len = chunk
2111 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2112 .len();
2113 offset -= chunk.len();
2114 offset += non_whitespace_len;
2115 if non_whitespace_len != 0 {
2116 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2117 return;
2118 }
2119 break;
2120 }
2121 }
2122 self.edit([(offset..len, "\n")], None, cx);
2123 }
2124
2125 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2126 /// calculated, then adjust the diff to account for those changes, and discard any
2127 /// parts of the diff that conflict with those changes.
2128 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2129 let snapshot = self.snapshot();
2130 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2131 let mut delta = 0;
2132 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2133 while let Some(edit_since) = edits_since.peek() {
2134 // If the edit occurs after a diff hunk, then it does not
2135 // affect that hunk.
2136 if edit_since.old.start > range.end {
2137 break;
2138 }
2139 // If the edit precedes the diff hunk, then adjust the hunk
2140 // to reflect the edit.
2141 else if edit_since.old.end < range.start {
2142 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2143 edits_since.next();
2144 }
2145 // If the edit intersects a diff hunk, then discard that hunk.
2146 else {
2147 return None;
2148 }
2149 }
2150
2151 let start = (range.start as i64 + delta) as usize;
2152 let end = (range.end as i64 + delta) as usize;
2153 Some((start..end, new_text))
2154 });
2155
2156 self.start_transaction();
2157 self.text.set_line_ending(diff.line_ending);
2158 self.edit(adjusted_edits, None, cx);
2159 self.end_transaction(cx)
2160 }
2161
2162 pub fn has_unsaved_edits(&self) -> bool {
2163 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2164
2165 if last_version == self.version {
2166 self.has_unsaved_edits
2167 .set((last_version, has_unsaved_edits));
2168 return has_unsaved_edits;
2169 }
2170
2171 let has_edits = self.has_edits_since(&self.saved_version);
2172 self.has_unsaved_edits
2173 .set((self.version.clone(), has_edits));
2174 has_edits
2175 }
2176
2177 /// Checks if the buffer has unsaved changes.
2178 pub fn is_dirty(&self) -> bool {
2179 if self.capability == Capability::ReadOnly {
2180 return false;
2181 }
2182 if self.has_conflict {
2183 return true;
2184 }
2185 match self.file.as_ref().map(|f| f.disk_state()) {
2186 Some(DiskState::New) | Some(DiskState::Deleted) => {
2187 !self.is_empty() && self.has_unsaved_edits()
2188 }
2189 _ => self.has_unsaved_edits(),
2190 }
2191 }
2192
2193 /// Marks the buffer as having a conflict regardless of current buffer state.
2194 pub fn set_conflict(&mut self) {
2195 self.has_conflict = true;
2196 }
2197
2198 /// Checks if the buffer and its file have both changed since the buffer
2199 /// was last saved or reloaded.
2200 pub fn has_conflict(&self) -> bool {
2201 if self.has_conflict {
2202 return true;
2203 }
2204 let Some(file) = self.file.as_ref() else {
2205 return false;
2206 };
2207 match file.disk_state() {
2208 DiskState::New => false,
2209 DiskState::Present { mtime } => match self.saved_mtime {
2210 Some(saved_mtime) => {
2211 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2212 }
2213 None => true,
2214 },
2215 DiskState::Deleted => false,
2216 }
2217 }
2218
2219 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2220 pub fn subscribe(&mut self) -> Subscription<usize> {
2221 self.text.subscribe()
2222 }
2223
2224 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2225 ///
2226 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
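    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest) of polling for changes with
    /// a shared flag instead of subscribing to events:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    ///
    /// // Later, e.g. once per frame:
    /// if changed.take() {
    ///     // The buffer's text changed since the last time the flag was cleared.
    /// }
    /// ```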
2228 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2229 if let Err(ix) = self
2230 .change_bits
2231 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2232 {
2233 self.change_bits.insert(ix, bit);
2234 }
2235 }
2236
2237 /// Set the change bit for all "listeners".
2238 fn was_changed(&mut self) {
2239 self.change_bits.retain(|change_bit| {
2240 change_bit
2241 .upgrade()
2242 .inspect(|bit| {
2243 _ = bit.replace(true);
2244 })
2245 .is_some()
2246 });
2247 }
2248
2249 /// Starts a transaction, if one is not already in-progress. When undoing or
2250 /// redoing edits, all of the edits performed within a transaction are undone
2251 /// or redone together.
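    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest): two edits grouped into a
    /// single transaction undo together.
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.start_transaction();
    ///     buffer.edit([(0..0, "fn main() {\n")], None, cx);
    ///     buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    ///     buffer.end_transaction(cx);
    ///
    ///     // A single undo reverts both edits.
    ///     buffer.undo(cx);
    /// });
    /// ```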
2252 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2253 self.start_transaction_at(Instant::now())
2254 }
2255
2256 /// Starts a transaction, providing the current time. Subsequent transactions
2257 /// that occur within a short period of time will be grouped together. This
2258 /// is controlled by the buffer's undo grouping duration.
2259 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2260 self.transaction_depth += 1;
2261 if self.was_dirty_before_starting_transaction.is_none() {
2262 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2263 }
2264 self.text.start_transaction_at(now)
2265 }
2266
2267 /// Terminates the current transaction, if this is the outermost transaction.
2268 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2269 self.end_transaction_at(Instant::now(), cx)
2270 }
2271
2272 /// Terminates the current transaction, providing the current time. Subsequent transactions
2273 /// that occur within a short period of time will be grouped together. This
2274 /// is controlled by the buffer's undo grouping duration.
2275 pub fn end_transaction_at(
2276 &mut self,
2277 now: Instant,
2278 cx: &mut Context<Self>,
2279 ) -> Option<TransactionId> {
2280 assert!(self.transaction_depth > 0);
2281 self.transaction_depth -= 1;
2282 let was_dirty = if self.transaction_depth == 0 {
2283 self.was_dirty_before_starting_transaction.take().unwrap()
2284 } else {
2285 false
2286 };
2287 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2288 self.did_edit(&start_version, was_dirty, cx);
2289 Some(transaction_id)
2290 } else {
2291 None
2292 }
2293 }
2294
2295 /// Manually add a transaction to the buffer's undo history.
2296 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2297 self.text.push_transaction(transaction, now);
2298 }
2299
2300 /// Differs from `push_transaction` in that it does not clear the redo
2301 /// stack. Intended to be used to create a parent transaction to merge
2302 /// potential child transactions into.
2303 ///
2304 /// The caller is responsible for removing it from the undo history using
2305 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2306 /// are merged into this transaction, the caller is responsible for ensuring
2307 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2308 /// cleared is to create transactions with the usual `start_transaction` and
2309 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2311 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2312 self.text.push_empty_transaction(now)
2313 }
2314
    /// Prevents the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2317 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2318 self.text.finalize_last_transaction()
2319 }
2320
2321 /// Manually group all changes since a given transaction.
2322 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2323 self.text.group_until_transaction(transaction_id);
2324 }
2325
    /// Manually removes a transaction from the buffer's undo history.
2327 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2328 self.text.forget_transaction(transaction_id)
2329 }
2330
    /// Retrieves a transaction from the buffer's undo history.
2332 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2333 self.text.get_transaction(transaction_id)
2334 }
2335
2336 /// Manually merge two transactions in the buffer's undo history.
2337 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2338 self.text.merge_transactions(transaction, destination);
2339 }
2340
2341 /// Waits for the buffer to receive operations with the given timestamps.
2342 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2343 &mut self,
2344 edit_ids: It,
2345 ) -> impl Future<Output = Result<()>> + use<It> {
2346 self.text.wait_for_edits(edit_ids)
2347 }
2348
2349 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2350 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2351 &mut self,
2352 anchors: It,
2353 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2354 self.text.wait_for_anchors(anchors)
2355 }
2356
2357 /// Waits for the buffer to receive operations up to the given version.
2358 pub fn wait_for_version(
2359 &mut self,
2360 version: clock::Global,
2361 ) -> impl Future<Output = Result<()>> + use<> {
2362 self.text.wait_for_version(version)
2363 }
2364
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2367 pub fn give_up_waiting(&mut self) {
2368 self.text.give_up_waiting();
2369 }
2370
2371 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2372 let mut rx = None;
2373 if !self.autoindent_requests.is_empty() {
2374 let channel = oneshot::channel();
2375 self.wait_for_autoindent_txs.push(channel.0);
2376 rx = Some(channel.1);
2377 }
2378 rx
2379 }
2380
2381 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2382 pub fn set_active_selections(
2383 &mut self,
2384 selections: Arc<[Selection<Anchor>]>,
2385 line_mode: bool,
2386 cursor_shape: CursorShape,
2387 cx: &mut Context<Self>,
2388 ) {
2389 let lamport_timestamp = self.text.lamport_clock.tick();
2390 self.remote_selections.insert(
2391 self.text.replica_id(),
2392 SelectionSet {
2393 selections: selections.clone(),
2394 lamport_timestamp,
2395 line_mode,
2396 cursor_shape,
2397 },
2398 );
2399 self.send_operation(
2400 Operation::UpdateSelections {
2401 selections,
2402 line_mode,
2403 lamport_timestamp,
2404 cursor_shape,
2405 },
2406 true,
2407 cx,
2408 );
2409 self.non_text_state_update_count += 1;
2410 cx.notify();
2411 }
2412
2413 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2414 /// this replica.
2415 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2416 if self
2417 .remote_selections
2418 .get(&self.text.replica_id())
2419 .is_none_or(|set| !set.selections.is_empty())
2420 {
2421 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2422 }
2423 }
2424
2425 pub fn set_agent_selections(
2426 &mut self,
2427 selections: Arc<[Selection<Anchor>]>,
2428 line_mode: bool,
2429 cursor_shape: CursorShape,
2430 cx: &mut Context<Self>,
2431 ) {
2432 let lamport_timestamp = self.text.lamport_clock.tick();
2433 self.remote_selections.insert(
2434 ReplicaId::AGENT,
2435 SelectionSet {
2436 selections,
2437 lamport_timestamp,
2438 line_mode,
2439 cursor_shape,
2440 },
2441 );
2442 self.non_text_state_update_count += 1;
2443 cx.notify();
2444 }
2445
2446 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2447 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2448 }
2449
2450 /// Replaces the buffer's entire text.
2451 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2452 where
2453 T: Into<Arc<str>>,
2454 {
2455 self.autoindent_requests.clear();
2456 self.edit([(0..self.len(), text)], None, cx)
2457 }
2458
2459 /// Appends the given text to the end of the buffer.
2460 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2461 where
2462 T: Into<Arc<str>>,
2463 {
2464 self.edit([(self.len()..self.len(), text)], None, cx)
2465 }
2466
2467 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2468 /// delete, and a string of text to insert at that location.
2469 ///
2470 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2471 /// request for the edited ranges, which will be processed when the buffer finishes
2472 /// parsing.
2473 ///
    /// Parsing takes place at the end of a transaction, and may happen synchronously
    /// or asynchronously, depending on the changes.
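    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest): replace the first three
    /// bytes and append a closing brace, auto-indenting each edited line.
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit(
    ///         [(0..3, "let"), (buffer.len()..buffer.len(), "\n}")],
    ///         Some(AutoindentMode::EachLine),
    ///         cx,
    ///     );
    /// });
    /// ```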
2476 pub fn edit<I, S, T>(
2477 &mut self,
2478 edits_iter: I,
2479 autoindent_mode: Option<AutoindentMode>,
2480 cx: &mut Context<Self>,
2481 ) -> Option<clock::Lamport>
2482 where
2483 I: IntoIterator<Item = (Range<S>, T)>,
2484 S: ToOffset,
2485 T: Into<Arc<str>>,
2486 {
2487 // Skip invalid edits and coalesce contiguous ones.
2488 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2489
2490 for (range, new_text) in edits_iter {
2491 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2492
2493 if range.start > range.end {
2494 mem::swap(&mut range.start, &mut range.end);
2495 }
2496 let new_text = new_text.into();
2497 if !new_text.is_empty() || !range.is_empty() {
2498 if let Some((prev_range, prev_text)) = edits.last_mut()
2499 && prev_range.end >= range.start
2500 {
2501 prev_range.end = cmp::max(prev_range.end, range.end);
2502 *prev_text = format!("{prev_text}{new_text}").into();
2503 } else {
2504 edits.push((range, new_text));
2505 }
2506 }
2507 }
2508 if edits.is_empty() {
2509 return None;
2510 }
2511
2512 self.start_transaction();
2513 self.pending_autoindent.take();
2514 let autoindent_request = autoindent_mode
2515 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2516
2517 let edit_operation = self.text.edit(edits.iter().cloned());
2518 let edit_id = edit_operation.timestamp();
2519
2520 if let Some((before_edit, mode)) = autoindent_request {
2521 let mut delta = 0isize;
2522 let mut previous_setting = None;
2523 let entries: Vec<_> = edits
2524 .into_iter()
2525 .enumerate()
2526 .zip(&edit_operation.as_edit().unwrap().new_text)
2527 .filter(|((_, (range, _)), _)| {
2528 let language = before_edit.language_at(range.start);
2529 let language_id = language.map(|l| l.id());
2530 if let Some((cached_language_id, auto_indent)) = previous_setting
2531 && cached_language_id == language_id
2532 {
2533 auto_indent
2534 } else {
2535 // The auto-indent setting is not present in editorconfigs, hence
2536 // we can avoid passing the file here.
2537 let auto_indent =
2538 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2539 previous_setting = Some((language_id, auto_indent));
2540 auto_indent
2541 }
2542 })
2543 .map(|((ix, (range, _)), new_text)| {
2544 let new_text_length = new_text.len();
2545 let old_start = range.start.to_point(&before_edit);
2546 let new_start = (delta + range.start as isize) as usize;
2547 let range_len = range.end - range.start;
2548 delta += new_text_length as isize - range_len as isize;
2549
2550 // Decide what range of the insertion to auto-indent, and whether
2551 // the first line of the insertion should be considered a newly-inserted line
2552 // or an edit to an existing line.
2553 let mut range_of_insertion_to_indent = 0..new_text_length;
2554 let mut first_line_is_new = true;
2555
2556 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2557 let old_line_end = before_edit.line_len(old_start.row);
2558
2559 if old_start.column > old_line_start {
2560 first_line_is_new = false;
2561 }
2562
2563 if !new_text.contains('\n')
2564 && (old_start.column + (range_len as u32) < old_line_end
2565 || old_line_end == old_line_start)
2566 {
2567 first_line_is_new = false;
2568 }
2569
2570 // When inserting text starting with a newline, avoid auto-indenting the
2571 // previous line.
2572 if new_text.starts_with('\n') {
2573 range_of_insertion_to_indent.start += 1;
2574 first_line_is_new = true;
2575 }
2576
2577 let mut original_indent_column = None;
2578 if let AutoindentMode::Block {
2579 original_indent_columns,
2580 } = &mode
2581 {
2582 original_indent_column = Some(if new_text.starts_with('\n') {
2583 indent_size_for_text(
2584 new_text[range_of_insertion_to_indent.clone()].chars(),
2585 )
2586 .len
2587 } else {
2588 original_indent_columns
2589 .get(ix)
2590 .copied()
2591 .flatten()
2592 .unwrap_or_else(|| {
2593 indent_size_for_text(
2594 new_text[range_of_insertion_to_indent.clone()].chars(),
2595 )
2596 .len
2597 })
2598 });
2599
2600 // Avoid auto-indenting the line after the edit.
2601 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2602 range_of_insertion_to_indent.end -= 1;
2603 }
2604 }
2605
2606 AutoindentRequestEntry {
2607 first_line_is_new,
2608 original_indent_column,
2609 indent_size: before_edit.language_indent_size_at(range.start, cx),
2610 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2611 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2612 }
2613 })
2614 .collect();
2615
2616 if !entries.is_empty() {
2617 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2618 before_edit,
2619 entries,
2620 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2621 ignore_empty_lines: false,
2622 }));
2623 }
2624 }
2625
2626 self.end_transaction(cx);
2627 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2628 Some(edit_id)
2629 }
2630
2631 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2632 self.was_changed();
2633
2634 if self.edits_since::<usize>(old_version).next().is_none() {
2635 return;
2636 }
2637
2638 self.reparse(cx, true);
2639 cx.emit(BufferEvent::Edited);
2640 if was_dirty != self.is_dirty() {
2641 cx.emit(BufferEvent::DirtyChanged);
2642 }
2643 cx.notify();
2644 }
2645
2646 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2647 where
2648 I: IntoIterator<Item = Range<T>>,
2649 T: ToOffset + Copy,
2650 {
2651 let before_edit = self.snapshot();
2652 let entries = ranges
2653 .into_iter()
2654 .map(|range| AutoindentRequestEntry {
2655 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2656 first_line_is_new: true,
2657 indent_size: before_edit.language_indent_size_at(range.start, cx),
2658 original_indent_column: None,
2659 })
2660 .collect();
2661 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2662 before_edit,
2663 entries,
2664 is_block_mode: false,
2665 ignore_empty_lines: true,
2666 }));
2667 self.request_autoindent(cx);
2668 }
2669
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2672 pub fn insert_empty_line(
2673 &mut self,
2674 position: impl ToPoint,
2675 space_above: bool,
2676 space_below: bool,
2677 cx: &mut Context<Self>,
2678 ) -> Point {
2679 let mut position = position.to_point(self);
2680
2681 self.start_transaction();
2682
2683 self.edit(
2684 [(position..position, "\n")],
2685 Some(AutoindentMode::EachLine),
2686 cx,
2687 );
2688
2689 if position.column > 0 {
2690 position += Point::new(1, 0);
2691 }
2692
2693 if !self.is_line_blank(position.row) {
2694 self.edit(
2695 [(position..position, "\n")],
2696 Some(AutoindentMode::EachLine),
2697 cx,
2698 );
2699 }
2700
2701 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2702 self.edit(
2703 [(position..position, "\n")],
2704 Some(AutoindentMode::EachLine),
2705 cx,
2706 );
2707 position.row += 1;
2708 }
2709
2710 if space_below
2711 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2712 {
2713 self.edit(
2714 [(position..position, "\n")],
2715 Some(AutoindentMode::EachLine),
2716 cx,
2717 );
2718 }
2719
2720 self.end_transaction(cx);
2721
2722 position
2723 }
2724
2725 /// Applies the given remote operations to the buffer.
2726 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2727 self.pending_autoindent.take();
2728 let was_dirty = self.is_dirty();
2729 let old_version = self.version.clone();
2730 let mut deferred_ops = Vec::new();
2731 let buffer_ops = ops
2732 .into_iter()
2733 .filter_map(|op| match op {
2734 Operation::Buffer(op) => Some(op),
2735 _ => {
2736 if self.can_apply_op(&op) {
2737 self.apply_op(op, cx);
2738 } else {
2739 deferred_ops.push(op);
2740 }
2741 None
2742 }
2743 })
2744 .collect::<Vec<_>>();
2745 for operation in buffer_ops.iter() {
2746 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2747 }
2748 self.text.apply_ops(buffer_ops);
2749 self.deferred_ops.insert(deferred_ops);
2750 self.flush_deferred_ops(cx);
2751 self.did_edit(&old_version, was_dirty, cx);
2752 // Notify independently of whether the buffer was edited as the operations could include a
2753 // selection update.
2754 cx.notify();
2755 }
2756
2757 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2758 let mut deferred_ops = Vec::new();
2759 for op in self.deferred_ops.drain().iter().cloned() {
2760 if self.can_apply_op(&op) {
2761 self.apply_op(op, cx);
2762 } else {
2763 deferred_ops.push(op);
2764 }
2765 }
2766 self.deferred_ops.insert(deferred_ops);
2767 }
2768
2769 pub fn has_deferred_ops(&self) -> bool {
2770 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2771 }
2772
2773 fn can_apply_op(&self, operation: &Operation) -> bool {
2774 match operation {
2775 Operation::Buffer(_) => {
2776 unreachable!("buffer operations should never be applied at this layer")
2777 }
2778 Operation::UpdateDiagnostics {
2779 diagnostics: diagnostic_set,
2780 ..
2781 } => diagnostic_set.iter().all(|diagnostic| {
2782 self.text.can_resolve(&diagnostic.range.start)
2783 && self.text.can_resolve(&diagnostic.range.end)
2784 }),
2785 Operation::UpdateSelections { selections, .. } => selections
2786 .iter()
2787 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2788 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2789 }
2790 }
2791
2792 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2793 match operation {
2794 Operation::Buffer(_) => {
2795 unreachable!("buffer operations should never be applied at this layer")
2796 }
2797 Operation::UpdateDiagnostics {
2798 server_id,
2799 diagnostics: diagnostic_set,
2800 lamport_timestamp,
2801 } => {
2802 let snapshot = self.snapshot();
2803 self.apply_diagnostic_update(
2804 server_id,
2805 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2806 lamport_timestamp,
2807 cx,
2808 );
2809 }
2810 Operation::UpdateSelections {
2811 selections,
2812 lamport_timestamp,
2813 line_mode,
2814 cursor_shape,
2815 } => {
2816 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2817 && set.lamport_timestamp > lamport_timestamp
2818 {
2819 return;
2820 }
2821
2822 self.remote_selections.insert(
2823 lamport_timestamp.replica_id,
2824 SelectionSet {
2825 selections,
2826 lamport_timestamp,
2827 line_mode,
2828 cursor_shape,
2829 },
2830 );
2831 self.text.lamport_clock.observe(lamport_timestamp);
2832 self.non_text_state_update_count += 1;
2833 }
2834 Operation::UpdateCompletionTriggers {
2835 triggers,
2836 lamport_timestamp,
2837 server_id,
2838 } => {
2839 if triggers.is_empty() {
2840 self.completion_triggers_per_language_server
2841 .remove(&server_id);
2842 self.completion_triggers = self
2843 .completion_triggers_per_language_server
2844 .values()
2845 .flat_map(|triggers| triggers.iter().cloned())
2846 .collect();
2847 } else {
2848 self.completion_triggers_per_language_server
2849 .insert(server_id, triggers.iter().cloned().collect());
2850 self.completion_triggers.extend(triggers);
2851 }
2852 self.text.lamport_clock.observe(lamport_timestamp);
2853 }
2854 Operation::UpdateLineEnding {
2855 line_ending,
2856 lamport_timestamp,
2857 } => {
2858 self.text.set_line_ending(line_ending);
2859 self.text.lamport_clock.observe(lamport_timestamp);
2860 }
2861 }
2862 }
2863
2864 fn apply_diagnostic_update(
2865 &mut self,
2866 server_id: LanguageServerId,
2867 diagnostics: DiagnosticSet,
2868 lamport_timestamp: clock::Lamport,
2869 cx: &mut Context<Self>,
2870 ) {
2871 if lamport_timestamp > self.diagnostics_timestamp {
2872 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2873 if diagnostics.is_empty() {
2874 if let Ok(ix) = ix {
2875 self.diagnostics.remove(ix);
2876 }
2877 } else {
2878 match ix {
2879 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2880 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2881 };
2882 }
2883 self.diagnostics_timestamp = lamport_timestamp;
2884 self.non_text_state_update_count += 1;
2885 self.text.lamport_clock.observe(lamport_timestamp);
2886 cx.notify();
2887 cx.emit(BufferEvent::DiagnosticsUpdated);
2888 }
2889 }
2890
2891 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2892 self.was_changed();
2893 cx.emit(BufferEvent::Operation {
2894 operation,
2895 is_local,
2896 });
2897 }
2898
2899 /// Removes the selections for a given peer.
2900 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2901 self.remote_selections.remove(&replica_id);
2902 cx.notify();
2903 }
2904
2905 /// Undoes the most recent transaction.
2906 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2907 let was_dirty = self.is_dirty();
2908 let old_version = self.version.clone();
2909
2910 if let Some((transaction_id, operation)) = self.text.undo() {
2911 self.send_operation(Operation::Buffer(operation), true, cx);
2912 self.did_edit(&old_version, was_dirty, cx);
2913 Some(transaction_id)
2914 } else {
2915 None
2916 }
2917 }
2918
2919 /// Manually undoes a specific transaction in the buffer's undo history.
2920 pub fn undo_transaction(
2921 &mut self,
2922 transaction_id: TransactionId,
2923 cx: &mut Context<Self>,
2924 ) -> bool {
2925 let was_dirty = self.is_dirty();
2926 let old_version = self.version.clone();
2927 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2928 self.send_operation(Operation::Buffer(operation), true, cx);
2929 self.did_edit(&old_version, was_dirty, cx);
2930 true
2931 } else {
2932 false
2933 }
2934 }
2935
2936 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2937 pub fn undo_to_transaction(
2938 &mut self,
2939 transaction_id: TransactionId,
2940 cx: &mut Context<Self>,
2941 ) -> bool {
2942 let was_dirty = self.is_dirty();
2943 let old_version = self.version.clone();
2944
2945 let operations = self.text.undo_to_transaction(transaction_id);
2946 let undone = !operations.is_empty();
2947 for operation in operations {
2948 self.send_operation(Operation::Buffer(operation), true, cx);
2949 }
2950 if undone {
2951 self.did_edit(&old_version, was_dirty, cx)
2952 }
2953 undone
2954 }
2955
2956 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2957 let was_dirty = self.is_dirty();
2958 let operation = self.text.undo_operations(counts);
2959 let old_version = self.version.clone();
2960 self.send_operation(Operation::Buffer(operation), true, cx);
2961 self.did_edit(&old_version, was_dirty, cx);
2962 }
2963
    /// Redoes the most recently undone transaction.
2965 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2966 let was_dirty = self.is_dirty();
2967 let old_version = self.version.clone();
2968
2969 if let Some((transaction_id, operation)) = self.text.redo() {
2970 self.send_operation(Operation::Buffer(operation), true, cx);
2971 self.did_edit(&old_version, was_dirty, cx);
2972 Some(transaction_id)
2973 } else {
2974 None
2975 }
2976 }
2977
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2979 pub fn redo_to_transaction(
2980 &mut self,
2981 transaction_id: TransactionId,
2982 cx: &mut Context<Self>,
2983 ) -> bool {
2984 let was_dirty = self.is_dirty();
2985 let old_version = self.version.clone();
2986
2987 let operations = self.text.redo_to_transaction(transaction_id);
2988 let redone = !operations.is_empty();
2989 for operation in operations {
2990 self.send_operation(Operation::Buffer(operation), true, cx);
2991 }
2992 if redone {
2993 self.did_edit(&old_version, was_dirty, cx)
2994 }
2995 redone
2996 }
2997
    /// Overrides the current completion triggers with the user-provided completion triggers.
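    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest); `server_id` is the id of
    /// an assumed running language server:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_completion_triggers(
    ///         server_id,
    ///         BTreeSet::from([".".to_string(), "::".to_string()]),
    ///         cx,
    ///     );
    /// });
    /// ```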
2999 pub fn set_completion_triggers(
3000 &mut self,
3001 server_id: LanguageServerId,
3002 triggers: BTreeSet<String>,
3003 cx: &mut Context<Self>,
3004 ) {
3005 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3006 if triggers.is_empty() {
3007 self.completion_triggers_per_language_server
3008 .remove(&server_id);
3009 self.completion_triggers = self
3010 .completion_triggers_per_language_server
3011 .values()
3012 .flat_map(|triggers| triggers.iter().cloned())
3013 .collect();
3014 } else {
3015 self.completion_triggers_per_language_server
3016 .insert(server_id, triggers.clone());
3017 self.completion_triggers.extend(triggers.iter().cloned());
3018 }
3019 self.send_operation(
3020 Operation::UpdateCompletionTriggers {
3021 triggers: triggers.into_iter().collect(),
3022 lamport_timestamp: self.completion_triggers_timestamp,
3023 server_id,
3024 },
3025 true,
3026 cx,
3027 );
3028 cx.notify();
3029 }
3030
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3033 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3034 &self.completion_triggers
3035 }
3036
3037 /// Call this directly after performing edits to prevent the preview tab
3038 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3039 /// to return false until there are additional edits.
3040 pub fn refresh_preview(&mut self) {
3041 self.preview_version = self.version.clone();
3042 }
3043
3044 /// Whether we should preserve the preview status of a tab containing this buffer.
3045 pub fn preserve_preview(&self) -> bool {
3046 !self.has_edits_since(&self.preview_version)
3047 }
3048}
3049
3050#[doc(hidden)]
3051#[cfg(any(test, feature = "test-support"))]
3052impl Buffer {
3053 pub fn edit_via_marked_text(
3054 &mut self,
3055 marked_string: &str,
3056 autoindent_mode: Option<AutoindentMode>,
3057 cx: &mut Context<Self>,
3058 ) {
3059 let edits = self.edits_for_marked_text(marked_string);
3060 self.edit(edits, autoindent_mode, cx);
3061 }
3062
3063 pub fn set_group_interval(&mut self, group_interval: Duration) {
3064 self.text.set_group_interval(group_interval);
3065 }
3066
3067 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3068 where
3069 T: rand::Rng,
3070 {
3071 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3072 let mut last_end = None;
3073 for _ in 0..old_range_count {
3074 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3075 break;
3076 }
3077
3078 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3079 let mut range = self.random_byte_range(new_start, rng);
3080 if rng.random_bool(0.2) {
3081 mem::swap(&mut range.start, &mut range.end);
3082 }
3083 last_end = Some(range.end);
3084
3085 let new_text_len = rng.random_range(0..10);
3086 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3087 new_text = new_text.to_uppercase();
3088
3089 edits.push((range, new_text));
3090 }
3091 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3092 self.edit(edits, None, cx);
3093 }
3094
3095 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3096 let was_dirty = self.is_dirty();
3097 let old_version = self.version.clone();
3098
3099 let ops = self.text.randomly_undo_redo(rng);
3100 if !ops.is_empty() {
3101 for op in ops {
3102 self.send_operation(Operation::Buffer(op), true, cx);
3103 self.did_edit(&old_version, was_dirty, cx);
3104 }
3105 }
3106 }
3107}
3108
3109impl EventEmitter<BufferEvent> for Buffer {}
3110
3111impl Deref for Buffer {
3112 type Target = TextBuffer;
3113
3114 fn deref(&self) -> &Self::Target {
3115 &self.text
3116 }
3117}
3118
3119impl BufferSnapshot {
3120 /// Returns [`IndentSize`] for a given line that respects user settings and
3121 /// language preferences.
3122 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3123 indent_size_for_line(self, row)
3124 }
3125
3126 /// Returns [`IndentSize`] for a given position that respects user settings
3127 /// and language preferences.
3128 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3129 let settings = language_settings(
3130 self.language_at(position).map(|l| l.name()),
3131 self.file(),
3132 cx,
3133 );
3134 if settings.hard_tabs {
3135 IndentSize::tab()
3136 } else {
3137 IndentSize::spaces(settings.tab_size.get())
3138 }
3139 }
3140
3141 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3142 /// is passed in as `single_indent_size`.
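    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest): compute suggestions for
    /// rows 1 through 3, treating four spaces as one indent level.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4u32, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```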
3143 pub fn suggested_indents(
3144 &self,
3145 rows: impl Iterator<Item = u32>,
3146 single_indent_size: IndentSize,
3147 ) -> BTreeMap<u32, IndentSize> {
3148 let mut result = BTreeMap::new();
3149
3150 for row_range in contiguous_ranges(rows, 10) {
3151 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3152 Some(suggestions) => suggestions,
3153 _ => break,
3154 };
3155
3156 for (row, suggestion) in row_range.zip(suggestions) {
3157 let indent_size = if let Some(suggestion) = suggestion {
3158 result
3159 .get(&suggestion.basis_row)
3160 .copied()
3161 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3162 .with_delta(suggestion.delta, single_indent_size)
3163 } else {
3164 self.indent_size_for_line(row)
3165 };
3166
3167 result.insert(row, indent_size);
3168 }
3169 }
3170
3171 result
3172 }
3173
3174 fn suggest_autoindents(
3175 &self,
3176 row_range: Range<u32>,
3177 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3178 let config = &self.language.as_ref()?.config;
3179 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3180
3181 #[derive(Debug, Clone)]
3182 struct StartPosition {
3183 start: Point,
3184 suffix: SharedString,
3185 }
3186
3187 // Find the suggested indentation ranges based on the syntax tree.
3188 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3189 let end = Point::new(row_range.end, 0);
3190 let range = (start..end).to_offset(&self.text);
3191 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3192 Some(&grammar.indents_config.as_ref()?.query)
3193 });
3194 let indent_configs = matches
3195 .grammars()
3196 .iter()
3197 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3198 .collect::<Vec<_>>();
3199
3200 let mut indent_ranges = Vec::<Range<Point>>::new();
3201 let mut start_positions = Vec::<StartPosition>::new();
3202 let mut outdent_positions = Vec::<Point>::new();
3203 while let Some(mat) = matches.peek() {
3204 let mut start: Option<Point> = None;
3205 let mut end: Option<Point> = None;
3206
3207 let config = indent_configs[mat.grammar_index];
3208 for capture in mat.captures {
3209 if capture.index == config.indent_capture_ix {
3210 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3211 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3212 } else if Some(capture.index) == config.start_capture_ix {
3213 start = Some(Point::from_ts_point(capture.node.end_position()));
3214 } else if Some(capture.index) == config.end_capture_ix {
3215 end = Some(Point::from_ts_point(capture.node.start_position()));
3216 } else if Some(capture.index) == config.outdent_capture_ix {
3217 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3218 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3219 start_positions.push(StartPosition {
3220 start: Point::from_ts_point(capture.node.start_position()),
3221 suffix: suffix.clone(),
3222 });
3223 }
3224 }
3225
3226 matches.advance();
3227 if let Some((start, end)) = start.zip(end) {
3228 if start.row == end.row {
3229 continue;
3230 }
3231 let range = start..end;
3232 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3233 Err(ix) => indent_ranges.insert(ix, range),
3234 Ok(ix) => {
3235 let prev_range = &mut indent_ranges[ix];
3236 prev_range.end = prev_range.end.max(range.end);
3237 }
3238 }
3239 }
3240 }
3241
3242 let mut error_ranges = Vec::<Range<Point>>::new();
3243 let mut matches = self
3244 .syntax
3245 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3246 while let Some(mat) = matches.peek() {
3247 let node = mat.captures[0].node;
3248 let start = Point::from_ts_point(node.start_position());
3249 let end = Point::from_ts_point(node.end_position());
3250 let range = start..end;
3251 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3252 Ok(ix) | Err(ix) => ix,
3253 };
3254 let mut end_ix = ix;
3255 while let Some(existing_range) = error_ranges.get(end_ix) {
3256 if existing_range.end < end {
3257 end_ix += 1;
3258 } else {
3259 break;
3260 }
3261 }
3262 error_ranges.splice(ix..end_ix, [range]);
3263 matches.advance();
3264 }
3265
3266 outdent_positions.sort();
3267 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
3270 if let Some(range_to_truncate) = indent_ranges
3271 .iter_mut()
3272 .filter(|indent_range| indent_range.contains(&outdent_position))
3273 .next_back()
3274 {
3275 range_to_truncate.end = outdent_position;
3276 }
3277 }
3278
3279 start_positions.sort_by_key(|b| b.start);
3280
        // Find the suggested indentation increases and decreases based on regexes.
3282 let mut regex_outdent_map = HashMap::default();
3283 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3284 let mut start_positions_iter = start_positions.iter().peekable();
3285
3286 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3287 self.for_each_line(
3288 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3289 ..Point::new(row_range.end, 0),
3290 |row, line| {
3291 if config
3292 .decrease_indent_pattern
3293 .as_ref()
3294 .is_some_and(|regex| regex.is_match(line))
3295 {
3296 indent_change_rows.push((row, Ordering::Less));
3297 }
3298 if config
3299 .increase_indent_pattern
3300 .as_ref()
3301 .is_some_and(|regex| regex.is_match(line))
3302 {
3303 indent_change_rows.push((row + 1, Ordering::Greater));
3304 }
3305 while let Some(pos) = start_positions_iter.peek() {
3306 if pos.start.row < row {
3307 let pos = start_positions_iter.next().unwrap();
3308 last_seen_suffix
3309 .entry(pos.suffix.to_string())
3310 .or_default()
3311 .push(pos.start);
3312 } else {
3313 break;
3314 }
3315 }
3316 for rule in &config.decrease_indent_patterns {
3317 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3318 let row_start_column = self.indent_size_for_line(row).len;
3319 let basis_row = rule
3320 .valid_after
3321 .iter()
3322 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3323 .flatten()
3324 .filter(|start_point| start_point.column <= row_start_column)
3325 .max_by_key(|start_point| start_point.row);
3326 if let Some(outdent_to_row) = basis_row {
3327 regex_outdent_map.insert(row, outdent_to_row.row);
3328 }
3329 break;
3330 }
3331 }
3332 },
3333 );
3334
3335 let mut indent_changes = indent_change_rows.into_iter().peekable();
3336 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3337 prev_non_blank_row.unwrap_or(0)
3338 } else {
3339 row_range.start.saturating_sub(1)
3340 };
3341
3342 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3343 Some(row_range.map(move |row| {
3344 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3345
3346 let mut indent_from_prev_row = false;
3347 let mut outdent_from_prev_row = false;
3348 let mut outdent_to_row = u32::MAX;
3349 let mut from_regex = false;
3350
3351 while let Some((indent_row, delta)) = indent_changes.peek() {
3352 match indent_row.cmp(&row) {
3353 Ordering::Equal => match delta {
3354 Ordering::Less => {
3355 from_regex = true;
3356 outdent_from_prev_row = true
3357 }
3358 Ordering::Greater => {
3359 indent_from_prev_row = true;
3360 from_regex = true
3361 }
3362 _ => {}
3363 },
3364
3365 Ordering::Greater => break,
3366 Ordering::Less => {}
3367 }
3368
3369 indent_changes.next();
3370 }
3371
3372 for range in &indent_ranges {
3373 if range.start.row >= row {
3374 break;
3375 }
3376 if range.start.row == prev_row && range.end > row_start {
3377 indent_from_prev_row = true;
3378 }
3379 if range.end > prev_row_start && range.end <= row_start {
3380 outdent_to_row = outdent_to_row.min(range.start.row);
3381 }
3382 }
3383
3384 if let Some(basis_row) = regex_outdent_map.get(&row) {
3385 indent_from_prev_row = false;
3386 outdent_to_row = *basis_row;
3387 from_regex = true;
3388 }
3389
3390 let within_error = error_ranges
3391 .iter()
3392 .any(|e| e.start.row < row && e.end > row_start);
3393
3394 let suggestion = if outdent_to_row == prev_row
3395 || (outdent_from_prev_row && indent_from_prev_row)
3396 {
3397 Some(IndentSuggestion {
3398 basis_row: prev_row,
3399 delta: Ordering::Equal,
3400 within_error: within_error && !from_regex,
3401 })
3402 } else if indent_from_prev_row {
3403 Some(IndentSuggestion {
3404 basis_row: prev_row,
3405 delta: Ordering::Greater,
3406 within_error: within_error && !from_regex,
3407 })
3408 } else if outdent_to_row < prev_row {
3409 Some(IndentSuggestion {
3410 basis_row: outdent_to_row,
3411 delta: Ordering::Equal,
3412 within_error: within_error && !from_regex,
3413 })
3414 } else if outdent_from_prev_row {
3415 Some(IndentSuggestion {
3416 basis_row: prev_row,
3417 delta: Ordering::Less,
3418 within_error: within_error && !from_regex,
3419 })
3420 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3421 {
3422 Some(IndentSuggestion {
3423 basis_row: prev_row,
3424 delta: Ordering::Equal,
3425 within_error: within_error && !from_regex,
3426 })
3427 } else {
3428 None
3429 };
3430
3431 prev_row = row;
3432 prev_row_start = row_start;
3433 suggestion
3434 }))
3435 }
3436
3437 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3438 while row > 0 {
3439 row -= 1;
3440 if !self.is_line_blank(row) {
3441 return Some(row);
3442 }
3443 }
3444 None
3445 }
3446
3447 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3448 let captures = self.syntax.captures(range, &self.text, |grammar| {
3449 grammar
3450 .highlights_config
3451 .as_ref()
3452 .map(|config| &config.query)
3453 });
3454 let highlight_maps = captures
3455 .grammars()
3456 .iter()
3457 .map(|grammar| grammar.highlight_map())
3458 .collect();
3459 (captures, highlight_maps)
3460 }
3461
3462 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3463 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3464 /// returned in chunks where each chunk has a single syntax highlighting style and
3465 /// diagnostic status.
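    ///
    /// # Examples
    ///
    /// A minimal sketch (not compiled as a doctest): reassemble the whole
    /// buffer from language-aware chunks.
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight style and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```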
3466 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3467 let range = range.start.to_offset(self)..range.end.to_offset(self);
3468
3469 let mut syntax = None;
3470 if language_aware {
3471 syntax = Some(self.get_highlights(range.clone()));
3472 }
3473 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3474 let diagnostics = language_aware;
3475 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3476 }
3477
3478 pub fn highlighted_text_for_range<T: ToOffset>(
3479 &self,
3480 range: Range<T>,
3481 override_style: Option<HighlightStyle>,
3482 syntax_theme: &SyntaxTheme,
3483 ) -> HighlightedText {
3484 HighlightedText::from_buffer_range(
3485 range,
3486 &self.text,
3487 &self.syntax,
3488 override_style,
3489 syntax_theme,
3490 )
3491 }
3492
3493 /// Invokes the given callback for each line of text in the given range of the buffer.
3494 /// Takes a callback to avoid allocating a new string for each line.
3495 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3496 let mut line = String::new();
3497 let mut row = range.start.row;
3498 for chunk in self
3499 .as_rope()
3500 .chunks_in_range(range.to_offset(self))
3501 .chain(["\n"])
3502 {
3503 for (newline_ix, text) in chunk.split('\n').enumerate() {
3504 if newline_ix > 0 {
3505 callback(row, &line);
3506 row += 1;
3507 line.clear();
3508 }
3509 line.push_str(text);
3510 }
3511 }
3512 }
3513
3514 /// Iterates over every [`SyntaxLayer`] in the buffer.
3515 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3516 self.syntax_layers_for_range(0..self.len(), true)
3517 }
3518
3519 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3520 let offset = position.to_offset(self);
3521 self.syntax_layers_for_range(offset..offset, false)
3522 .filter(|l| {
3523 if let Some(ranges) = l.included_sub_ranges {
3524 ranges.iter().any(|range| {
3525 let start = range.start.to_offset(self);
3526 start <= offset && {
3527 let end = range.end.to_offset(self);
3528 offset < end
3529 }
3530 })
3531 } else {
3532 l.node().start_byte() <= offset && l.node().end_byte() > offset
3533 }
3534 })
3535 .last()
3536 }
3537
3538 pub fn syntax_layers_for_range<D: ToOffset>(
3539 &self,
3540 range: Range<D>,
3541 include_hidden: bool,
3542 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3543 self.syntax
3544 .layers_for_range(range, &self.text, include_hidden)
3545 }
3546
3547 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3548 &self,
3549 range: Range<D>,
3550 ) -> Option<SyntaxLayer<'_>> {
3551 let range = range.to_offset(self);
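// Pick the "smallest" layer containing the range: prefer greater depth, then a later start
// offset, and finally an earlier end position.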
3552 self.syntax
3553 .layers_for_range(range, &self.text, false)
3554 .max_by(|a, b| {
3555 if a.depth != b.depth {
3556 a.depth.cmp(&b.depth)
3557 } else if a.offset.0 != b.offset.0 {
3558 a.offset.0.cmp(&b.offset.0)
3559 } else {
3560 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3561 }
3562 })
3563 }
3564
3565 /// Returns the main [`Language`].
3566 pub fn language(&self) -> Option<&Arc<Language>> {
3567 self.language.as_ref()
3568 }
3569
3570 /// Returns the [`Language`] at the given location.
3571 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3572 self.syntax_layer_at(position)
3573 .map(|info| info.language)
3574 .or(self.language.as_ref())
3575 }
3576
3577 /// Returns the settings for the language at the given location.
3578 pub fn settings_at<'a, D: ToOffset>(
3579 &'a self,
3580 position: D,
3581 cx: &'a App,
3582 ) -> Cow<'a, LanguageSettings> {
3583 language_settings(
3584 self.language_at(position).map(|l| l.name()),
3585 self.file.as_ref(),
3586 cx,
3587 )
3588 }
3589
3590 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3591 CharClassifier::new(self.language_scope_at(point))
3592 }
3593
3594 /// Returns the [`LanguageScope`] at the given location.
3595 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3596 let offset = position.to_offset(self);
3597 let mut scope = None;
3598 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3599
3600 // Use the layer that has the smallest node intersecting the given point.
3601 for layer in self
3602 .syntax
3603 .layers_for_range(offset..offset, &self.text, false)
3604 {
3605 let mut cursor = layer.node().walk();
3606
3607 let mut range = None;
3608 loop {
3609 let child_range = cursor.node().byte_range();
3610 if !child_range.contains(&offset) {
3611 break;
3612 }
3613
3614 range = Some(child_range);
3615 if cursor.goto_first_child_for_byte(offset).is_none() {
3616 break;
3617 }
3618 }
3619
3620 if let Some(range) = range
3621 && smallest_range_and_depth.as_ref().is_none_or(
3622 |(smallest_range, smallest_range_depth)| {
3623 if layer.depth > *smallest_range_depth {
3624 true
3625 } else if layer.depth == *smallest_range_depth {
3626 range.len() < smallest_range.len()
3627 } else {
3628 false
3629 }
3630 },
3631 )
3632 {
3633 smallest_range_and_depth = Some((range, layer.depth));
3634 scope = Some(LanguageScope {
3635 language: layer.language.clone(),
3636 override_id: layer.override_id(offset, &self.text),
3637 });
3638 }
3639 }
3640
3641 scope.or_else(|| {
3642 self.language.clone().map(|language| LanguageScope {
3643 language,
3644 override_id: None,
3645 })
3646 })
3647 }
3648
3649 /// Returns a tuple of the range and character kind of the word
3650 /// surrounding the given position.
3651 pub fn surrounding_word<T: ToOffset>(
3652 &self,
3653 start: T,
3654 scope_context: Option<CharScopeContext>,
3655 ) -> (Range<usize>, Option<CharKind>) {
3656 let mut start = start.to_offset(self);
3657 let mut end = start;
3658 let mut next_chars = self.chars_at(start).take(128).peekable();
3659 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3660
3661 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3662 let word_kind = cmp::max(
3663 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3664 next_chars.peek().copied().map(|c| classifier.kind(c)),
3665 );
3666
3667 for ch in prev_chars {
3668 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3669 start -= ch.len_utf8();
3670 } else {
3671 break;
3672 }
3673 }
3674
3675 for ch in next_chars {
3676 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3677 end += ch.len_utf8();
3678 } else {
3679 break;
3680 }
3681 }
3682
3683 (start..end, word_kind)
3684 }
3685
3686 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3687 /// range. When `require_larger` is true, the node found must be larger than the query range.
3688 ///
3689 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3690 /// be moved to the root of the tree.
3691 fn goto_node_enclosing_range(
3692 cursor: &mut tree_sitter::TreeCursor,
3693 query_range: &Range<usize>,
3694 require_larger: bool,
3695 ) -> bool {
3696 let mut ascending = false;
3697 loop {
3698 let mut range = cursor.node().byte_range();
3699 if query_range.is_empty() {
3700 // When the query range is empty and the current node starts after it, move to the
3701 // previous sibling to find the containing node.
3702 if range.start > query_range.start {
3703 cursor.goto_previous_sibling();
3704 range = cursor.node().byte_range();
3705 }
3706 } else {
3707 // When the query range is non-empty and the current node ends exactly at the start,
3708 // move to the next sibling to find a node that extends beyond the start.
3709 if range.end == query_range.start {
3710 cursor.goto_next_sibling();
3711 range = cursor.node().byte_range();
3712 }
3713 }
3714
3715 let encloses = range.contains_inclusive(query_range)
3716 && (!require_larger || range.len() > query_range.len());
3717 if !encloses {
3718 ascending = true;
3719 if !cursor.goto_parent() {
3720 return false;
3721 }
3722 continue;
3723 } else if ascending {
3724 return true;
3725 }
3726
3727 // Descend into the current node.
3728 if cursor
3729 .goto_first_child_for_byte(query_range.start)
3730 .is_none()
3731 {
3732 return true;
3733 }
3734 }
3735 }
3736
3737 pub fn syntax_ancestor<'a, T: ToOffset>(
3738 &'a self,
3739 range: Range<T>,
3740 ) -> Option<tree_sitter::Node<'a>> {
3741 let range = range.start.to_offset(self)..range.end.to_offset(self);
3742 let mut result: Option<tree_sitter::Node<'a>> = None;
3743 for layer in self
3744 .syntax
3745 .layers_for_range(range.clone(), &self.text, true)
3746 {
3747 let mut cursor = layer.node().walk();
3748
3749 // Find the node that both contains the range and is larger than it.
3750 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3751 continue;
3752 }
3753
3754 let left_node = cursor.node();
3755 let mut layer_result = left_node;
3756
3757 // For an empty range, try to find another node immediately to the right of the range.
3758 if left_node.end_byte() == range.start {
3759 let mut right_node = None;
3760 while !cursor.goto_next_sibling() {
3761 if !cursor.goto_parent() {
3762 break;
3763 }
3764 }
3765
3766 while cursor.node().start_byte() == range.start {
3767 right_node = Some(cursor.node());
3768 if !cursor.goto_first_child() {
3769 break;
3770 }
3771 }
3772
3773 // If there is a candidate node on both sides of the (empty) range, then
3774 // decide between the two by favoring a named node over an anonymous token.
3775 // If both nodes are the same in that regard, favor the right one.
3776 if let Some(right_node) = right_node
3777 && (right_node.is_named() || !left_node.is_named())
3778 {
3779 layer_result = right_node;
3780 }
3781 }
3782
3783 if let Some(previous_result) = &result
3784 && previous_result.byte_range().len() < layer_result.byte_range().len()
3785 {
3786 continue;
3787 }
3788 result = Some(layer_result);
3789 }
3790
3791 result
3792 }
3793
3794 /// Find the previous sibling syntax node at the given range.
3795 ///
3796 /// This function locates the syntax node that precedes the node containing
3797 /// the given range. It searches hierarchically by:
3798 /// 1. Finding the node that contains the given range
3799 /// 2. Looking for the previous sibling at the same tree level
3800 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3801 ///
3802 /// Returns `None` if there is no previous sibling at any ancestor level.
3803 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3804 &'a self,
3805 range: Range<T>,
3806 ) -> Option<tree_sitter::Node<'a>> {
3807 let range = range.start.to_offset(self)..range.end.to_offset(self);
3808 let mut result: Option<tree_sitter::Node<'a>> = None;
3809
3810 for layer in self
3811 .syntax
3812 .layers_for_range(range.clone(), &self.text, true)
3813 {
3814 let mut cursor = layer.node().walk();
3815
3816 // Find the node that contains the range
3817 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3818 continue;
3819 }
3820
3821 // Look for the previous sibling, moving up ancestor levels if needed
3822 loop {
3823 if cursor.goto_previous_sibling() {
3824 let layer_result = cursor.node();
3825
3826 if let Some(previous_result) = &result {
3827 if previous_result.byte_range().end < layer_result.byte_range().end {
3828 continue;
3829 }
3830 }
3831 result = Some(layer_result);
3832 break;
3833 }
3834
3835 // No sibling found at this level, try moving up to parent
3836 if !cursor.goto_parent() {
3837 break;
3838 }
3839 }
3840 }
3841
3842 result
3843 }
3844
3845 /// Find the next sibling syntax node at the given range.
3846 ///
3847 /// This function locates the syntax node that follows the node containing
3848 /// the given range. It searches hierarchically by:
3849 /// 1. Finding the node that contains the given range
3850 /// 2. Looking for the next sibling at the same tree level
3851 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3852 ///
3853 /// Returns `None` if there is no next sibling at any ancestor level.
3854 pub fn syntax_next_sibling<'a, T: ToOffset>(
3855 &'a self,
3856 range: Range<T>,
3857 ) -> Option<tree_sitter::Node<'a>> {
3858 let range = range.start.to_offset(self)..range.end.to_offset(self);
3859 let mut result: Option<tree_sitter::Node<'a>> = None;
3860
3861 for layer in self
3862 .syntax
3863 .layers_for_range(range.clone(), &self.text, true)
3864 {
3865 let mut cursor = layer.node().walk();
3866
3867 // Find the node that contains the range
3868 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3869 continue;
3870 }
3871
3872 // Look for the next sibling, moving up ancestor levels if needed
3873 loop {
3874 if cursor.goto_next_sibling() {
3875 let layer_result = cursor.node();
3876
3877 if let Some(previous_result) = &result {
3878 if previous_result.byte_range().start > layer_result.byte_range().start {
3879 continue;
3880 }
3881 }
3882 result = Some(layer_result);
3883 break;
3884 }
3885
3886 // No sibling found at this level, try moving up to parent
3887 if !cursor.goto_parent() {
3888 break;
3889 }
3890 }
3891 }
3892
3893 result
3894 }
3895
3896 /// Returns the root syntax node within the given row
3897 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3898 let start_offset = position.to_offset(self);
3899
3900 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3901
3902 let layer = self
3903 .syntax
3904 .layers_for_range(start_offset..start_offset, &self.text, true)
3905 .next()?;
3906
3907 let mut cursor = layer.node().walk();
3908
3909 // Descend to the first leaf that touches the start of the range.
3910 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3911 if cursor.node().end_byte() == start_offset {
3912 cursor.goto_next_sibling();
3913 }
3914 }
3915
3916 // Ascend to the root node within the same row.
3917 while cursor.goto_parent() {
3918 if cursor.node().start_position().row != row {
3919 break;
3920 }
3921 }
3922
3923 Some(cursor.node())
3924 }
3925
3926 /// Returns the outline for the buffer.
3927 ///
3928 /// This method allows passing an optional [`SyntaxTheme`] to
3929 /// syntax-highlight the returned symbols.
3930 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3931 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3932 }
3933
3934 /// Returns all the symbols that contain the given position.
3935 ///
3936 /// This method allows passing an optional [`SyntaxTheme`] to
3937 /// syntax-highlight the returned symbols.
3938 pub fn symbols_containing<T: ToOffset>(
3939 &self,
3940 position: T,
3941 theme: Option<&SyntaxTheme>,
3942 ) -> Vec<OutlineItem<Anchor>> {
3943 let position = position.to_offset(self);
3944 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3945 let end = self.clip_offset(position + 1, Bias::Right);
3946 let mut items = self.outline_items_containing(start..end, false, theme);
3947 let mut prev_depth = None;
3948 items.retain(|item| {
3949 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3950 prev_depth = Some(item.depth);
3951 result
3952 });
3953 items
3954 }
3955
3956 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3957 let range = range.to_offset(self);
3958 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3959 grammar.outline_config.as_ref().map(|c| &c.query)
3960 });
3961 let configs = matches
3962 .grammars()
3963 .iter()
3964 .map(|g| g.outline_config.as_ref().unwrap())
3965 .collect::<Vec<_>>();
3966
3967 while let Some(mat) = matches.peek() {
3968 let config = &configs[mat.grammar_index];
3969 let containing_item_node = maybe!({
3970 let item_node = mat.captures.iter().find_map(|cap| {
3971 if cap.index == config.item_capture_ix {
3972 Some(cap.node)
3973 } else {
3974 None
3975 }
3976 })?;
3977
3978 let item_byte_range = item_node.byte_range();
3979 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3980 None
3981 } else {
3982 Some(item_node)
3983 }
3984 });
3985
3986 if let Some(item_node) = containing_item_node {
3987 return Some(
3988 Point::from_ts_point(item_node.start_position())
3989 ..Point::from_ts_point(item_node.end_position()),
3990 );
3991 }
3992
3993 matches.advance();
3994 }
3995 None
3996 }
3997
3998 pub fn outline_items_containing<T: ToOffset>(
3999 &self,
4000 range: Range<T>,
4001 include_extra_context: bool,
4002 theme: Option<&SyntaxTheme>,
4003 ) -> Vec<OutlineItem<Anchor>> {
4004 self.outline_items_containing_internal(
4005 range,
4006 include_extra_context,
4007 theme,
4008 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4009 )
4010 }
4011
4012 pub fn outline_items_as_points_containing<T: ToOffset>(
4013 &self,
4014 range: Range<T>,
4015 include_extra_context: bool,
4016 theme: Option<&SyntaxTheme>,
4017 ) -> Vec<OutlineItem<Point>> {
4018 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4019 range
4020 })
4021 }
4022
4023 fn outline_items_containing_internal<T: ToOffset, U>(
4024 &self,
4025 range: Range<T>,
4026 include_extra_context: bool,
4027 theme: Option<&SyntaxTheme>,
4028 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4029 ) -> Vec<OutlineItem<U>> {
4030 let range = range.to_offset(self);
4031 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4032 grammar.outline_config.as_ref().map(|c| &c.query)
4033 });
4034
4035 let mut items = Vec::new();
4036 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4037 while let Some(mat) = matches.peek() {
4038 let config = matches.grammars()[mat.grammar_index]
4039 .outline_config
4040 .as_ref()
4041 .unwrap();
4042 if let Some(item) =
4043 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4044 {
4045 items.push(item);
4046 } else if let Some(capture) = mat
4047 .captures
4048 .iter()
4049 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4050 {
4051 let capture_range = capture.node.start_position()..capture.node.end_position();
4052 let mut capture_row_range =
4053 capture_range.start.row as u32..capture_range.end.row as u32;
4054 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4055 {
4056 capture_row_range.end -= 1;
4057 }
4058 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4059 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4060 last_row_range.end = capture_row_range.end;
4061 } else {
4062 annotation_row_ranges.push(capture_row_range);
4063 }
4064 } else {
4065 annotation_row_ranges.push(capture_row_range);
4066 }
4067 }
4068 matches.advance();
4069 }
4070
4071 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4072
4073 // Assign depths based on containment relationships and convert to anchors.
4074 let mut item_ends_stack = Vec::<Point>::new();
4075 let mut anchor_items = Vec::new();
4076 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4077 for item in items {
4078 while let Some(last_end) = item_ends_stack.last().copied() {
4079 if last_end < item.range.end {
4080 item_ends_stack.pop();
4081 } else {
4082 break;
4083 }
4084 }
4085
4086 let mut annotation_row_range = None;
4087 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4088 let row_preceding_item = item.range.start.row.saturating_sub(1);
4089 if next_annotation_row_range.end < row_preceding_item {
4090 annotation_row_ranges.next();
4091 } else {
4092 if next_annotation_row_range.end == row_preceding_item {
4093 annotation_row_range = Some(next_annotation_row_range.clone());
4094 annotation_row_ranges.next();
4095 }
4096 break;
4097 }
4098 }
4099
4100 anchor_items.push(OutlineItem {
4101 depth: item_ends_stack.len(),
4102 range: range_callback(self, item.range.clone()),
4103 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4104 text: item.text,
4105 highlight_ranges: item.highlight_ranges,
4106 name_ranges: item.name_ranges,
4107 body_range: item.body_range.map(|r| range_callback(self, r)),
4108 annotation_range: annotation_row_range.map(|annotation_range| {
4109 let point_range = Point::new(annotation_range.start, 0)
4110 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4111 range_callback(self, point_range)
4112 }),
4113 });
4114 item_ends_stack.push(item.range.end);
4115 }
4116
4117 anchor_items
4118 }
4119
4120 fn next_outline_item(
4121 &self,
4122 config: &OutlineConfig,
4123 mat: &SyntaxMapMatch,
4124 range: &Range<usize>,
4125 include_extra_context: bool,
4126 theme: Option<&SyntaxTheme>,
4127 ) -> Option<OutlineItem<Point>> {
4128 let item_node = mat.captures.iter().find_map(|cap| {
4129 if cap.index == config.item_capture_ix {
4130 Some(cap.node)
4131 } else {
4132 None
4133 }
4134 })?;
4135
4136 let item_byte_range = item_node.byte_range();
4137 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4138 return None;
4139 }
4140 let item_point_range = Point::from_ts_point(item_node.start_position())
4141 ..Point::from_ts_point(item_node.end_position());
4142
4143 let mut open_point = None;
4144 let mut close_point = None;
4145
4146 let mut buffer_ranges = Vec::new();
4147 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4148 let mut range = node.start_byte()..node.end_byte();
4149 let start = node.start_position();
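// If the node spans multiple rows, clip the captured range to the end of its first line so
// the outline text stays single-line.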
4150 if node.end_position().row > start.row {
4151 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4152 }
4153
4154 if !range.is_empty() {
4155 buffer_ranges.push((range, node_is_name));
4156 }
4157 };
4158
4159 for capture in mat.captures {
4160 if capture.index == config.name_capture_ix {
4161 add_to_buffer_ranges(capture.node, true);
4162 } else if Some(capture.index) == config.context_capture_ix
4163 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4164 {
4165 add_to_buffer_ranges(capture.node, false);
4166 } else {
4167 if Some(capture.index) == config.open_capture_ix {
4168 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4169 } else if Some(capture.index) == config.close_capture_ix {
4170 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4171 }
4172 }
4173 }
4174
4175 if buffer_ranges.is_empty() {
4176 return None;
4177 }
4178 let source_range_for_text =
4179 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4180
4181 let mut text = String::new();
4182 let mut highlight_ranges = Vec::new();
4183 let mut name_ranges = Vec::new();
4184 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4185 let mut last_buffer_range_end = 0;
4186 for (buffer_range, is_name) in buffer_ranges {
4187 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4188 if space_added {
4189 text.push(' ');
4190 }
4191 let before_append_len = text.len();
4192 let mut offset = buffer_range.start;
4193 chunks.seek(buffer_range.clone());
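// Append the capture's highlighted text, truncating the final chunk so we never read past
// the end of the capture's byte range.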
4194 for mut chunk in chunks.by_ref() {
4195 if chunk.text.len() > buffer_range.end - offset {
4196 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4197 offset = buffer_range.end;
4198 } else {
4199 offset += chunk.text.len();
4200 }
4201 let style = chunk
4202 .syntax_highlight_id
4203 .zip(theme)
4204 .and_then(|(highlight, theme)| highlight.style(theme));
4205 if let Some(style) = style {
4206 let start = text.len();
4207 let end = start + chunk.text.len();
4208 highlight_ranges.push((start..end, style));
4209 }
4210 text.push_str(chunk.text);
4211 if offset >= buffer_range.end {
4212 break;
4213 }
4214 }
4215 if is_name {
4216 let after_append_len = text.len();
4217 let start = if space_added && !name_ranges.is_empty() {
4218 before_append_len - 1
4219 } else {
4220 before_append_len
4221 };
4222 name_ranges.push(start..after_append_len);
4223 }
4224 last_buffer_range_end = buffer_range.end;
4225 }
4226
4227 Some(OutlineItem {
4228 depth: 0, // We'll calculate the depth later
4229 range: item_point_range,
4230 source_range_for_text: source_range_for_text.to_point(self),
4231 text,
4232 highlight_ranges,
4233 name_ranges,
4234 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4235 annotation_range: None,
4236 })
4237 }
4238
4239 pub fn function_body_fold_ranges<T: ToOffset>(
4240 &self,
4241 within: Range<T>,
4242 ) -> impl Iterator<Item = Range<usize>> + '_ {
4243 self.text_object_ranges(within, TreeSitterOptions::default())
4244 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4245 }
4246
4247 /// For each grammar in the language, runs the provided
4248 /// [`tree_sitter::Query`] against the given range.
4249 pub fn matches(
4250 &self,
4251 range: Range<usize>,
4252 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4253 ) -> SyntaxMapMatches<'_> {
4254 self.syntax.matches(range, self, query)
4255 }
4256
4257 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4258 /// Hence, it may return more bracket pairs than the given range itself contains.
4259 ///
4260 /// Chunks listed in `known_chunks` are omitted, as long as they are still up to date.
4261 /// The resulting bracket match collections are not ordered.
4262 pub fn fetch_bracket_ranges(
4263 &self,
4264 range: Range<usize>,
4265 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4266 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4267 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4268
4269 let known_chunks = match known_chunks {
4270 Some((known_version, known_chunks)) => {
4271 if !tree_sitter_data
4272 .chunks
4273 .version()
4274 .changed_since(known_version)
4275 {
4276 known_chunks.clone()
4277 } else {
4278 HashSet::default()
4279 }
4280 }
4281 None => HashSet::default(),
4282 };
4283
4284 let mut new_bracket_matches = HashMap::default();
4285 let mut all_bracket_matches = HashMap::default();
4286
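// For each applicable row chunk the caller doesn't already know about, reuse the cached
// bracket matches for that chunk if present, or compute them by running the brackets query.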
4287 for chunk in tree_sitter_data
4288 .chunks
4289 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4290 {
4291 if known_chunks.contains(&chunk.row_range()) {
4292 continue;
4293 }
4294 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4295 continue;
4296 };
4297 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4298
4299 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4300 Some(cached_brackets) => cached_brackets,
4301 None => {
4302 let mut all_brackets = Vec::new();
4303 let mut opens = Vec::new();
4304 let mut color_pairs = Vec::new();
4305
4306 let mut matches =
4307 self.syntax
4308 .matches(chunk_range.clone(), &self.text, |grammar| {
4309 grammar.brackets_config.as_ref().map(|c| &c.query)
4310 });
4311 let configs = matches
4312 .grammars()
4313 .iter()
4314 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4315 .collect::<Vec<_>>();
4316
4317 while let Some(mat) = matches.peek() {
4318 let mut open = None;
4319 let mut close = None;
4320 let syntax_layer_depth = mat.depth;
4321 let config = configs[mat.grammar_index];
4322 let pattern = &config.patterns[mat.pattern_index];
4323 for capture in mat.captures {
4324 if capture.index == config.open_capture_ix {
4325 open = Some(capture.node.byte_range());
4326 } else if capture.index == config.close_capture_ix {
4327 close = Some(capture.node.byte_range());
4328 }
4329 }
4330
4331 matches.advance();
4332
4333 let Some((open_range, close_range)) = open.zip(close) else {
4334 continue;
4335 };
4336
4337 let bracket_range = open_range.start..=close_range.end;
4338 if !bracket_range.overlaps(&chunk_range) {
4339 continue;
4340 }
4341
4342 let index = all_brackets.len();
4343 all_brackets.push(BracketMatch {
4344 open_range: open_range.clone(),
4345 close_range: close_range.clone(),
4346 newline_only: pattern.newline_only,
4347 syntax_layer_depth,
4348 color_index: None,
4349 });
4350
4351 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such a
4352 // pair will match the entire tag with all of the text inside it.
4353 // For now, avoid highlighting any pair where both brackets are longer than a single character.
4354 // We need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4355 let should_color = !pattern.rainbow_exclude
4356 && (open_range.len() == 1 || close_range.len() == 1);
4357 if should_color {
4358 opens.push(open_range.clone());
4359 color_pairs.push((open_range, close_range, index));
4360 }
4361 }
4362
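// Assign rainbow color indices: walk the colorizable pairs in order of their closing
// position, pushing open brackets that start before each close onto a stack; when the top of
// the stack is the pair's own open bracket, its depth on the stack becomes the color index.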
4363 opens.sort_by_key(|r| (r.start, r.end));
4364 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4365 color_pairs.sort_by_key(|(_, close, _)| close.end);
4366
4367 let mut open_stack = Vec::new();
4368 let mut open_index = 0;
4369 for (open, close, index) in color_pairs {
4370 while open_index < opens.len() && opens[open_index].start < close.start {
4371 open_stack.push(opens[open_index].clone());
4372 open_index += 1;
4373 }
4374
4375 if open_stack.last() == Some(&open) {
4376 let depth_index = open_stack.len() - 1;
4377 all_brackets[index].color_index = Some(depth_index);
4378 open_stack.pop();
4379 }
4380 }
4381
4382 all_brackets.sort_by_key(|bracket_match| {
4383 (bracket_match.open_range.start, bracket_match.open_range.end)
4384 });
4385 new_bracket_matches.insert(chunk.id, all_brackets.clone());
4386 all_brackets
4387 }
4388 };
4389 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4390 }
4391
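// Cache the freshly computed bracket matches, but only if the shared tree-sitter data still
// corresponds to the current buffer version.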
4392 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4393 if latest_tree_sitter_data.chunks.version() == &self.version {
4394 for (chunk_id, new_matches) in new_bracket_matches {
4395 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4396 if old_chunks.is_none() {
4397 *old_chunks = Some(new_matches);
4398 }
4399 }
4400 }
4401
4402 all_bracket_matches
4403 }
4404
4405 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4406 let mut tree_sitter_data = self.tree_sitter_data.lock();
4407 if self
4408 .version
4409 .changed_since(tree_sitter_data.chunks.version())
4410 {
4411 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4412 }
4413 tree_sitter_data
4414 }
4415
4416 pub fn all_bracket_ranges(
4417 &self,
4418 range: Range<usize>,
4419 ) -> impl Iterator<Item = BracketMatch<usize>> {
4420 self.fetch_bracket_ranges(range.clone(), None)
4421 .into_values()
4422 .flatten()
4423 .filter(move |bracket_match| {
4424 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4425 bracket_range.overlaps(&range)
4426 })
4427 }
4428
4429 /// Returns bracket range pairs overlapping or adjacent to `range`
4430 pub fn bracket_ranges<T: ToOffset>(
4431 &self,
4432 range: Range<T>,
4433 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4434 // Find bracket pairs that *inclusively* contain the given range.
4435 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4436 self.all_bracket_ranges(range)
4437 .filter(|pair| !pair.newline_only)
4438 }
4439
4440 pub fn debug_variables_query<T: ToOffset>(
4441 &self,
4442 range: Range<T>,
4443 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4444 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4445
4446 let mut matches = self.syntax.matches_with_options(
4447 range.clone(),
4448 &self.text,
4449 TreeSitterOptions::default(),
4450 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4451 );
4452
4453 let configs = matches
4454 .grammars()
4455 .iter()
4456 .map(|grammar| grammar.debug_variables_config.as_ref())
4457 .collect::<Vec<_>>();
4458
4459 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4460
4461 iter::from_fn(move || {
4462 loop {
4463 while let Some(capture) = captures.pop() {
4464 if capture.0.overlaps(&range) {
4465 return Some(capture);
4466 }
4467 }
4468
4469 let mat = matches.peek()?;
4470
4471 let Some(config) = configs[mat.grammar_index].as_ref() else {
4472 matches.advance();
4473 continue;
4474 };
4475
4476 for capture in mat.captures {
4477 let Some(ix) = config
4478 .objects_by_capture_ix
4479 .binary_search_by_key(&capture.index, |e| e.0)
4480 .ok()
4481 else {
4482 continue;
4483 };
4484 let text_object = config.objects_by_capture_ix[ix].1;
4485 let byte_range = capture.node.byte_range();
4486
4487 let mut found = false;
4488 for (range, existing) in captures.iter_mut() {
4489 if existing == &text_object {
4490 range.start = range.start.min(byte_range.start);
4491 range.end = range.end.max(byte_range.end);
4492 found = true;
4493 break;
4494 }
4495 }
4496
4497 if !found {
4498 captures.push((byte_range, text_object));
4499 }
4500 }
4501
4502 matches.advance();
4503 }
4504 })
4505 }
4506
4507 pub fn text_object_ranges<T: ToOffset>(
4508 &self,
4509 range: Range<T>,
4510 options: TreeSitterOptions,
4511 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4512 let range =
4513 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4514
4515 let mut matches =
4516 self.syntax
4517 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4518 grammar.text_object_config.as_ref().map(|c| &c.query)
4519 });
4520
4521 let configs = matches
4522 .grammars()
4523 .iter()
4524 .map(|grammar| grammar.text_object_config.as_ref())
4525 .collect::<Vec<_>>();
4526
4527 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4528
4529 iter::from_fn(move || {
4530 loop {
4531 while let Some(capture) = captures.pop() {
4532 if capture.0.overlaps(&range) {
4533 return Some(capture);
4534 }
4535 }
4536
4537 let mat = matches.peek()?;
4538
4539 let Some(config) = configs[mat.grammar_index].as_ref() else {
4540 matches.advance();
4541 continue;
4542 };
4543
4544 for capture in mat.captures {
4545 let Some(ix) = config
4546 .text_objects_by_capture_ix
4547 .binary_search_by_key(&capture.index, |e| e.0)
4548 .ok()
4549 else {
4550 continue;
4551 };
4552 let text_object = config.text_objects_by_capture_ix[ix].1;
4553 let byte_range = capture.node.byte_range();
4554
4555 let mut found = false;
4556 for (range, existing) in captures.iter_mut() {
4557 if existing == &text_object {
4558 range.start = range.start.min(byte_range.start);
4559 range.end = range.end.max(byte_range.end);
4560 found = true;
4561 break;
4562 }
4563 }
4564
4565 if !found {
4566 captures.push((byte_range, text_object));
4567 }
4568 }
4569
4570 matches.advance();
4571 }
4572 })
4573 }
4574
4575 /// Returns enclosing bracket ranges containing the given range
4576 pub fn enclosing_bracket_ranges<T: ToOffset>(
4577 &self,
4578 range: Range<T>,
4579 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4580 let range = range.start.to_offset(self)..range.end.to_offset(self);
4581
4582 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4583 let max_depth = result
4584 .iter()
4585 .map(|mat| mat.syntax_layer_depth)
4586 .max()
4587 .unwrap_or(0);
4588 result.into_iter().filter(move |pair| {
4589 pair.open_range.start <= range.start
4590 && pair.close_range.end >= range.end
4591 && pair.syntax_layer_depth == max_depth
4592 })
4593 }
4594
4595 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4596 ///
4597 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4598 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4599 &self,
4600 range: Range<T>,
4601 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4602 ) -> Option<(Range<usize>, Range<usize>)> {
4603 let range = range.start.to_offset(self)..range.end.to_offset(self);
4604
4605 // Get the ranges of the innermost pair of brackets.
4606 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4607
4608 for pair in self.enclosing_bracket_ranges(range) {
4609 if let Some(range_filter) = range_filter
4610 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4611 {
4612 continue;
4613 }
4614
4615 let len = pair.close_range.end - pair.open_range.start;
4616
4617 if let Some((existing_open, existing_close)) = &result {
4618 let existing_len = existing_close.end - existing_open.start;
4619 if len > existing_len {
4620 continue;
4621 }
4622 }
4623
4624 result = Some((pair.open_range, pair.close_range));
4625 }
4626
4627 result
4628 }
4629
4630 /// Returns offset ranges for any matches of the redaction query.
4631 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4632 /// will be run on the relevant section of the buffer.
4633 pub fn redacted_ranges<T: ToOffset>(
4634 &self,
4635 range: Range<T>,
4636 ) -> impl Iterator<Item = Range<usize>> + '_ {
4637 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4638 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4639 grammar
4640 .redactions_config
4641 .as_ref()
4642 .map(|config| &config.query)
4643 });
4644
4645 let configs = syntax_matches
4646 .grammars()
4647 .iter()
4648 .map(|grammar| grammar.redactions_config.as_ref())
4649 .collect::<Vec<_>>();
4650
4651 iter::from_fn(move || {
4652 let redacted_range = syntax_matches
4653 .peek()
4654 .and_then(|mat| {
4655 configs[mat.grammar_index].and_then(|config| {
4656 mat.captures
4657 .iter()
4658 .find(|capture| capture.index == config.redaction_capture_ix)
4659 })
4660 })
4661 .map(|mat| mat.node.byte_range());
4662 syntax_matches.advance();
4663 redacted_range
4664 })
4665 }
4666
4667 pub fn injections_intersecting_range<T: ToOffset>(
4668 &self,
4669 range: Range<T>,
4670 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4671 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4672
4673 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4674 grammar
4675 .injection_config
4676 .as_ref()
4677 .map(|config| &config.query)
4678 });
4679
4680 let configs = syntax_matches
4681 .grammars()
4682 .iter()
4683 .map(|grammar| grammar.injection_config.as_ref())
4684 .collect::<Vec<_>>();
4685
4686 iter::from_fn(move || {
4687 let ranges = syntax_matches.peek().and_then(|mat| {
4688 let config = &configs[mat.grammar_index]?;
4689 let content_capture_range = mat.captures.iter().find_map(|capture| {
4690 if capture.index == config.content_capture_ix {
4691 Some(capture.node.byte_range())
4692 } else {
4693 None
4694 }
4695 })?;
4696 let language = self.language_at(content_capture_range.start)?;
4697 Some((content_capture_range, language))
4698 });
4699 syntax_matches.advance();
4700 ranges
4701 })
4702 }
4703
4704 pub fn runnable_ranges(
4705 &self,
4706 offset_range: Range<usize>,
4707 ) -> impl Iterator<Item = RunnableRange> + '_ {
4708 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4709 grammar.runnable_config.as_ref().map(|config| &config.query)
4710 });
4711
4712 let test_configs = syntax_matches
4713 .grammars()
4714 .iter()
4715 .map(|grammar| grammar.runnable_config.as_ref())
4716 .collect::<Vec<_>>();
4717
4718 iter::from_fn(move || {
4719 loop {
4720 let mat = syntax_matches.peek()?;
4721
4722 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4723 let mut run_range = None;
4724 let full_range = mat.captures.iter().fold(
4725 Range {
4726 start: usize::MAX,
4727 end: 0,
4728 },
4729 |mut acc, next| {
4730 let byte_range = next.node.byte_range();
4731 if acc.start > byte_range.start {
4732 acc.start = byte_range.start;
4733 }
4734 if acc.end < byte_range.end {
4735 acc.end = byte_range.end;
4736 }
4737 acc
4738 },
4739 );
4740 if full_range.start > full_range.end {
4741 // We did not find a full spanning range of this match.
4742 return None;
4743 }
4744 let extra_captures: SmallVec<[_; 1]> =
4745 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4746 test_configs
4747 .extra_captures
4748 .get(capture.index as usize)
4749 .cloned()
4750 .and_then(|tag_name| match tag_name {
4751 RunnableCapture::Named(name) => {
4752 Some((capture.node.byte_range(), name))
4753 }
4754 RunnableCapture::Run => {
4755 let _ = run_range.insert(capture.node.byte_range());
4756 None
4757 }
4758 })
4759 }));
4760 let run_range = run_range?;
4761 let tags = test_configs
4762 .query
4763 .property_settings(mat.pattern_index)
4764 .iter()
4765 .filter_map(|property| {
4766 if *property.key == *"tag" {
4767 property
4768 .value
4769 .as_ref()
4770 .map(|value| RunnableTag(value.to_string().into()))
4771 } else {
4772 None
4773 }
4774 })
4775 .collect();
4776 let extra_captures = extra_captures
4777 .into_iter()
4778 .map(|(range, name)| {
4779 (
4780 name.to_string(),
4781 self.text_for_range(range).collect::<String>(),
4782 )
4783 })
4784 .collect();
4785 // All tags should have the same range.
4786 Some(RunnableRange {
4787 run_range,
4788 full_range,
4789 runnable: Runnable {
4790 tags,
4791 language: mat.language,
4792 buffer: self.remote_id(),
4793 },
4794 extra_captures,
4795 buffer_id: self.remote_id(),
4796 })
4797 });
4798
4799 syntax_matches.advance();
4800 if test_range.is_some() {
4801 // It's fine for us to short-circuit when .peek()? returns None. We just don't want to return None from this
4802 // iterator when a match did not contain a run marker, so in that case we loop around to the next match.
4803 return test_range;
4804 }
4805 }
4806 })
4807 }
4808
4809 /// Returns selections for remote peers intersecting the given range.
4810 #[allow(clippy::type_complexity)]
4811 pub fn selections_in_range(
4812 &self,
4813 range: Range<Anchor>,
4814 include_local: bool,
4815 ) -> impl Iterator<
4816 Item = (
4817 ReplicaId,
4818 bool,
4819 CursorShape,
4820 impl Iterator<Item = &Selection<Anchor>> + '_,
4821 ),
4822 > + '_ {
4823 self.remote_selections
4824 .iter()
4825 .filter(move |(replica_id, set)| {
4826 (include_local || **replica_id != self.text.replica_id())
4827 && !set.selections.is_empty()
4828 })
4829 .map(move |(replica_id, set)| {
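// Binary-search for the first selection that ends at or after the range start, and for the
// first selection that starts after the range end; the selections in between intersect the
// range.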
4830 let start_ix = match set.selections.binary_search_by(|probe| {
4831 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4832 }) {
4833 Ok(ix) | Err(ix) => ix,
4834 };
4835 let end_ix = match set.selections.binary_search_by(|probe| {
4836 probe.start.cmp(&range.end, self).then(Ordering::Less)
4837 }) {
4838 Ok(ix) | Err(ix) => ix,
4839 };
4840
4841 (
4842 *replica_id,
4843 set.line_mode,
4844 set.cursor_shape,
4845 set.selections[start_ix..end_ix].iter(),
4846 )
4847 })
4848 }
4849
4850 /// Returns whether the buffer contains any diagnostics.
4851 pub fn has_diagnostics(&self) -> bool {
4852 !self.diagnostics.is_empty()
4853 }
4854
4855 /// Returns all the diagnostics intersecting the given range.
4856 pub fn diagnostics_in_range<'a, T, O>(
4857 &'a self,
4858 search_range: Range<T>,
4859 reversed: bool,
4860 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4861 where
4862 T: 'a + Clone + ToOffset,
4863 O: 'a + FromAnchor,
4864 {
4865 let mut iterators: Vec<_> = self
4866 .diagnostics
4867 .iter()
4868 .map(|(_, collection)| {
4869 collection
4870 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4871 .peekable()
4872 })
4873 .collect();
4874
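// Merge the per-language-server iterators, always yielding the entry whose range starts
// earliest (or latest, when reversed), breaking ties by severity and then by group id.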
4875 std::iter::from_fn(move || {
4876 let (next_ix, _) = iterators
4877 .iter_mut()
4878 .enumerate()
4879 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4880 .min_by(|(_, a), (_, b)| {
4881 let cmp = a
4882 .range
4883 .start
4884 .cmp(&b.range.start, self)
4885 // when range is equal, sort by diagnostic severity
4886 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4887 // and stabilize order with group_id
4888 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4889 if reversed { cmp.reverse() } else { cmp }
4890 })?;
4891 iterators[next_ix]
4892 .next()
4893 .map(
4894 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4895 diagnostic,
4896 range: FromAnchor::from_anchor(&range.start, self)
4897 ..FromAnchor::from_anchor(&range.end, self),
4898 },
4899 )
4900 })
4901 }
4902
4903 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4904 /// should be used instead.
4905 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4906 &self.diagnostics
4907 }
4908
4909 /// Returns all the diagnostic groups associated with the given
4910 /// language server ID. If no language server ID is provided,
4911 /// all diagnostic groups are returned.
4912 pub fn diagnostic_groups(
4913 &self,
4914 language_server_id: Option<LanguageServerId>,
4915 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4916 let mut groups = Vec::new();
4917
4918 if let Some(language_server_id) = language_server_id {
4919 if let Ok(ix) = self
4920 .diagnostics
4921 .binary_search_by_key(&language_server_id, |e| e.0)
4922 {
4923 self.diagnostics[ix]
4924 .1
4925 .groups(language_server_id, &mut groups, self);
4926 }
4927 } else {
4928 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4929 diagnostics.groups(*language_server_id, &mut groups, self);
4930 }
4931 }
4932
4933 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4934 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4935 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4936 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4937 });
4938
4939 groups
4940 }
4941
4942 /// Returns an iterator over the diagnostics for the given group.
4943 pub fn diagnostic_group<O>(
4944 &self,
4945 group_id: usize,
4946 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4947 where
4948 O: FromAnchor + 'static,
4949 {
4950 self.diagnostics
4951 .iter()
4952 .flat_map(move |(_, set)| set.group(group_id, self))
4953 }
4954
4955 /// An integer version number that accounts for all updates besides
4956 /// the buffer's text itself (which is versioned via a version vector).
4957 pub fn non_text_state_update_count(&self) -> usize {
4958 self.non_text_state_update_count
4959 }
4960
4961 /// An integer version that changes when the buffer's syntax changes.
4962 pub fn syntax_update_count(&self) -> usize {
4963 self.syntax.update_count()
4964 }
4965
4966 /// Returns a snapshot of the underlying file.
4967 pub fn file(&self) -> Option<&Arc<dyn File>> {
4968 self.file.as_ref()
4969 }
4970
4971 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4972 if let Some(file) = self.file() {
4973 if file.path().file_name().is_none() || include_root {
4974 Some(file.full_path(cx).to_string_lossy().into_owned())
4975 } else {
4976 Some(file.path().display(file.path_style(cx)).to_string())
4977 }
4978 } else {
4979 None
4980 }
4981 }
4982
4983 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4984 let query_str = query.fuzzy_contents;
4985 if query_str.is_some_and(|query| query.is_empty()) {
4986 return BTreeMap::default();
4987 }
4988
4989 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4990 language,
4991 override_id: None,
4992 }));
4993
4994 let mut query_ix = 0;
4995 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4996 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4997
4998 let mut words = BTreeMap::default();
4999 let mut current_word_start_ix = None;
5000 let mut chunk_ix = query.range.start;
5001 for chunk in self.chunks(query.range, false) {
5002 for (i, c) in chunk.text.char_indices() {
5003 let ix = chunk_ix + i;
5004 if classifier.is_word(c) {
5005 if current_word_start_ix.is_none() {
5006 current_word_start_ix = Some(ix);
5007 }
5008
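// Match the fuzzy query as an in-order, case-insensitive subsequence of the current word.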
5009 if let Some(query_chars) = &query_chars
5010 && query_ix < query_len
5011 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5012 {
5013 query_ix += 1;
5014 }
5015 continue;
5016 } else if let Some(word_start) = current_word_start_ix.take()
5017 && query_ix == query_len
5018 {
5019 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5020 let mut word_text = self.text_for_range(word_start..ix).peekable();
5021 let first_char = word_text
5022 .peek()
5023 .and_then(|first_chunk| first_chunk.chars().next());
5024 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
5025 if !query.skip_digits
5026 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5027 {
5028 words.insert(word_text.collect(), word_range);
5029 }
5030 }
5031 query_ix = 0;
5032 }
5033 chunk_ix += chunk.text.len();
5034 }
5035
5036 words
5037 }
5038}
5039
5040pub struct WordsQuery<'a> {
5041 /// Only returns words that contain every character of the fuzzy string, matched in order (case-insensitively).
5042 pub fuzzy_contents: Option<&'a str>,
5043 /// Skips words that start with a digit.
5044 pub skip_digits: bool,
5045 /// The buffer offset range in which to look for words.
5046 pub range: Range<usize>,
5047}
5048
5049fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5050 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5051}
5052
5053fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5054 let mut result = IndentSize::spaces(0);
5055 for c in text {
5056 let kind = match c {
5057 ' ' => IndentKind::Space,
5058 '\t' => IndentKind::Tab,
5059 _ => break,
5060 };
5061 if result.len == 0 {
5062 result.kind = kind;
5063 }
5064 result.len += 1;
5065 }
5066 result
5067}
5068
5069impl Clone for BufferSnapshot {
5070 fn clone(&self) -> Self {
5071 Self {
5072 text: self.text.clone(),
5073 syntax: self.syntax.clone(),
5074 file: self.file.clone(),
5075 remote_selections: self.remote_selections.clone(),
5076 diagnostics: self.diagnostics.clone(),
5077 language: self.language.clone(),
5078 tree_sitter_data: self.tree_sitter_data.clone(),
5079 non_text_state_update_count: self.non_text_state_update_count,
5080 }
5081 }
5082}
5083
5084impl Deref for BufferSnapshot {
5085 type Target = text::BufferSnapshot;
5086
5087 fn deref(&self) -> &Self::Target {
5088 &self.text
5089 }
5090}
5091
5092unsafe impl Send for BufferChunks<'_> {}
5093
5094impl<'a> BufferChunks<'a> {
5095 pub(crate) fn new(
5096 text: &'a Rope,
5097 range: Range<usize>,
5098 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5099 diagnostics: bool,
5100 buffer_snapshot: Option<&'a BufferSnapshot>,
5101 ) -> Self {
5102 let mut highlights = None;
5103 if let Some((captures, highlight_maps)) = syntax {
5104 highlights = Some(BufferChunkHighlights {
5105 captures,
5106 next_capture: None,
5107 stack: Default::default(),
5108 highlight_maps,
5109 })
5110 }
5111
5112 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5113 let chunks = text.chunks_in_range(range.clone());
5114
5115 let mut this = BufferChunks {
5116 range,
5117 buffer_snapshot,
5118 chunks,
5119 diagnostic_endpoints,
5120 error_depth: 0,
5121 warning_depth: 0,
5122 information_depth: 0,
5123 hint_depth: 0,
5124 unnecessary_depth: 0,
5125 underline: true,
5126 highlights,
5127 };
5128 this.initialize_diagnostic_endpoints();
5129 this
5130 }
5131
5132 /// Seeks to the given byte range in the buffer.
5133 pub fn seek(&mut self, range: Range<usize>) {
5134 let old_range = std::mem::replace(&mut self.range, range.clone());
5135 self.chunks.set_range(self.range.clone());
5136 if let Some(highlights) = self.highlights.as_mut() {
5137 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5138 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5139 highlights
5140 .stack
5141 .retain(|(end_offset, _)| *end_offset > range.start);
5142 if let Some(capture) = &highlights.next_capture
5143 && range.start >= capture.node.start_byte()
5144 {
5145 let next_capture_end = capture.node.end_byte();
5146 if range.start < next_capture_end {
5147 highlights.stack.push((
5148 next_capture_end,
5149 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5150 ));
5151 }
5152 highlights.next_capture.take();
5153 }
5154 } else if let Some(snapshot) = self.buffer_snapshot {
5155 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5156 *highlights = BufferChunkHighlights {
5157 captures,
5158 next_capture: None,
5159 stack: Default::default(),
5160 highlight_maps,
5161 };
5162 } else {
5163 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5164 // Seeking such BufferChunks is not supported.
5165 debug_assert!(
5166 false,
5167 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5168 );
5169 }
5170
5171 highlights.captures.set_byte_range(self.range.clone());
5172 self.initialize_diagnostic_endpoints();
5173 }
5174 }
5175
5176 fn initialize_diagnostic_endpoints(&mut self) {
5177 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5178 && let Some(buffer) = self.buffer_snapshot
5179 {
5180 let mut diagnostic_endpoints = Vec::new();
5181 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5182 diagnostic_endpoints.push(DiagnosticEndpoint {
5183 offset: entry.range.start,
5184 is_start: true,
5185 severity: entry.diagnostic.severity,
5186 is_unnecessary: entry.diagnostic.is_unnecessary,
5187 underline: entry.diagnostic.underline,
5188 });
5189 diagnostic_endpoints.push(DiagnosticEndpoint {
5190 offset: entry.range.end,
5191 is_start: false,
5192 severity: entry.diagnostic.severity,
5193 is_unnecessary: entry.diagnostic.is_unnecessary,
5194 underline: entry.diagnostic.underline,
5195 });
5196 }
5197 diagnostic_endpoints
5198 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5199 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5200 self.hint_depth = 0;
5201 self.error_depth = 0;
5202 self.warning_depth = 0;
5203 self.information_depth = 0;
5204 }
5205 }
5206
5207 /// The current byte offset in the buffer.
5208 pub fn offset(&self) -> usize {
5209 self.range.start
5210 }
5211
5212 pub fn range(&self) -> Range<usize> {
5213 self.range.clone()
5214 }
5215
5216 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5217 let depth = match endpoint.severity {
5218 DiagnosticSeverity::ERROR => &mut self.error_depth,
5219 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5220 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5221 DiagnosticSeverity::HINT => &mut self.hint_depth,
5222 _ => return,
5223 };
5224 if endpoint.is_start {
5225 *depth += 1;
5226 } else {
5227 *depth -= 1;
5228 }
5229
5230 if endpoint.is_unnecessary {
5231 if endpoint.is_start {
5232 self.unnecessary_depth += 1;
5233 } else {
5234 self.unnecessary_depth -= 1;
5235 }
5236 }
5237 }
5238
5239 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5240 if self.error_depth > 0 {
5241 Some(DiagnosticSeverity::ERROR)
5242 } else if self.warning_depth > 0 {
5243 Some(DiagnosticSeverity::WARNING)
5244 } else if self.information_depth > 0 {
5245 Some(DiagnosticSeverity::INFORMATION)
5246 } else if self.hint_depth > 0 {
5247 Some(DiagnosticSeverity::HINT)
5248 } else {
5249 None
5250 }
5251 }
5252
5253 fn current_code_is_unnecessary(&self) -> bool {
5254 self.unnecessary_depth > 0
5255 }
5256}
5257
5258impl<'a> Iterator for BufferChunks<'a> {
5259 type Item = Chunk<'a>;
5260
5261 fn next(&mut self) -> Option<Self::Item> {
5262 let mut next_capture_start = usize::MAX;
5263 let mut next_diagnostic_endpoint = usize::MAX;
5264
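// Maintain the stack of active highlight captures: pop captures that ended at or before the
// current offset, and push any captures that start at or before it.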
5265 if let Some(highlights) = self.highlights.as_mut() {
5266 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5267 if *parent_capture_end <= self.range.start {
5268 highlights.stack.pop();
5269 } else {
5270 break;
5271 }
5272 }
5273
5274 if highlights.next_capture.is_none() {
5275 highlights.next_capture = highlights.captures.next();
5276 }
5277
5278 while let Some(capture) = highlights.next_capture.as_ref() {
5279 if self.range.start < capture.node.start_byte() {
5280 next_capture_start = capture.node.start_byte();
5281 break;
5282 } else {
5283 let highlight_id =
5284 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5285 highlights
5286 .stack
5287 .push((capture.node.end_byte(), highlight_id));
5288 highlights.next_capture = highlights.captures.next();
5289 }
5290 }
5291 }
5292
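// Temporarily take the diagnostic endpoints so `update_diagnostic_depths` can be called while
// iterating them, consuming every endpoint at or before the current offset.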
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
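            // Compute this chunk's byte offsets relative to the start of the
            // underlying text chunk, then shift and mask the per-byte
            // `tabs`/`chars` bitmaps so their low bits line up with the slice
            // emitted below.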
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
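    ///
    /// For example, shrinking `IndentSize::spaces(8)` by `IndentSize::spaces(4)`
    /// (with [`Ordering::Less`]) yields four spaces, while growing an empty
    /// indent (with [`Ordering::Greater`]) adopts the kind and length of `size`.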
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

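    /// The number of columns this indent occupies when each tab is expanded
    /// to `tab_size` columns.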
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

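/// An in-memory [`File`] implementation for tests. Methods that would require
/// a real worktree, such as [`File::disk_state`] and [`File::to_proto`], are
/// left unimplemented.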
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

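/// Groups an ascending sequence of values into contiguous ranges, starting a
/// new range whenever the sequence skips a value or the current range reaches
/// `max_len`.
///
/// For example, the values `[1, 2, 3, 5, 6]` with a `max_len` of 2 yield the
/// ranges `1..3`, `3..4`, and `5..7`.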
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

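/// Classifies characters as word, whitespace, or punctuation characters,
/// taking any language-specific word characters from the surrounding
/// [`LanguageScope`] into account.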
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

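    /// Classifies `c`, treating punctuation as part of a word when
    /// `ignore_punctuation` is true. Alphanumeric characters, underscores, and
    /// any additional word characters defined by the language scope are always
    /// classified as [`CharKind::Word`].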
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
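///
/// For example, for a rope containing `"ab  \ncd\t\n"`, this returns the
/// ranges `2..4` (the two spaces after `ab`) and `7..8` (the tab after `cd`).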
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}