1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25pub use clock::ReplicaId;
26use clock::{Global, Lamport};
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use itertools::Itertools;
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet, hash_map},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::{Arc, LazyLock},
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
79/// A label for the background task spawned by the buffer to compute
80/// a diff against the contents of its file.
81pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
82
/// Indicates whether a [`Buffer`] has permission to edit.
84#[derive(PartialEq, Clone, Copy, Debug)]
85pub enum Capability {
86 /// The buffer is a mutable replica.
87 ReadWrite,
88 /// The buffer is a read-only replica.
89 ReadOnly,
90}
91
92pub type BufferRow = u32;
93
94/// An in-memory representation of a source code file, including its text,
95/// syntax trees, git status, and diagnostics.
96pub struct Buffer {
97 text: TextBuffer,
98 branch_state: Option<BufferBranchState>,
99 /// Filesystem state, `None` when there is no path.
100 file: Option<Arc<dyn File>>,
101 /// The mtime of the file when this buffer was last loaded from
102 /// or saved to disk.
103 saved_mtime: Option<MTime>,
104 /// The version vector when this buffer was last loaded from
105 /// or saved to disk.
106 saved_version: clock::Global,
107 preview_version: clock::Global,
108 transaction_depth: usize,
109 was_dirty_before_starting_transaction: Option<bool>,
110 reload_task: Option<Task<Result<()>>>,
111 language: Option<Arc<Language>>,
112 autoindent_requests: Vec<Arc<AutoindentRequest>>,
113 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
114 pending_autoindent: Option<Task<()>>,
115 sync_parse_timeout: Duration,
116 syntax_map: Mutex<SyntaxMap>,
117 reparse: Option<Task<()>>,
118 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
119 non_text_state_update_count: usize,
120 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
121 remote_selections: TreeMap<ReplicaId, SelectionSet>,
122 diagnostics_timestamp: clock::Lamport,
123 completion_triggers: BTreeSet<String>,
124 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
125 completion_triggers_timestamp: clock::Lamport,
126 deferred_ops: OperationQueue<Operation>,
127 capability: Capability,
128 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
131 has_unsaved_edits: Cell<(clock::Global, bool)>,
132 change_bits: Vec<rc::Weak<Cell<bool>>>,
133 _subscriptions: Vec<gpui::Subscription>,
134 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
135}
136
137#[derive(Debug, Clone)]
138pub struct TreeSitterData {
139 chunks: RowChunks,
140 brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
141}
142
143const MAX_ROWS_IN_A_CHUNK: u32 = 50;
144
145impl TreeSitterData {
146 fn clear(&mut self) {
147 self.brackets_by_chunks = vec![None; self.chunks.len()];
148 }
149
150 fn new(snapshot: text::BufferSnapshot) -> Self {
151 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
152 Self {
153 brackets_by_chunks: vec![None; chunks.len()],
154 chunks,
155 }
156 }
157}
158
159#[derive(Copy, Clone, Debug, PartialEq, Eq)]
160pub enum ParseStatus {
161 Idle,
162 Parsing,
163}
164
165struct BufferBranchState {
166 base_buffer: Entity<Buffer>,
167 merged_operations: Vec<Lamport>,
168}
169
170/// An immutable, cheaply cloneable representation of a fixed
171/// state of a buffer.
172pub struct BufferSnapshot {
173 pub text: text::BufferSnapshot,
174 pub syntax: SyntaxSnapshot,
175 file: Option<Arc<dyn File>>,
176 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
177 remote_selections: TreeMap<ReplicaId, SelectionSet>,
178 language: Option<Arc<Language>>,
179 non_text_state_update_count: usize,
180 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
181}
182
183/// The kind and amount of indentation in a particular line. For now,
184/// assumes that indentation is all the same character.
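///
/// A minimal illustration with hypothetical values (not taken from this crate's tests):
///
/// ```ignore
/// // Four spaces of indentation.
/// let indent = IndentSize { len: 4, kind: IndentKind::Space };
/// assert_eq!(indent.len, 4);
/// ```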
185#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
186pub struct IndentSize {
187 /// The number of bytes that comprise the indentation.
188 pub len: u32,
189 /// The kind of whitespace used for indentation.
190 pub kind: IndentKind,
191}
192
193/// A whitespace character that's used for indentation.
194#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
195pub enum IndentKind {
196 /// An ASCII space character.
197 #[default]
198 Space,
199 /// An ASCII tab character.
200 Tab,
201}
202
203/// The shape of a selection cursor.
204#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
205pub enum CursorShape {
206 /// A vertical bar
207 #[default]
208 Bar,
209 /// A block that surrounds the following character
210 Block,
211 /// An underline that runs along the following character
212 Underline,
213 /// A box drawn around the following character
214 Hollow,
215}
216
217impl From<settings::CursorShape> for CursorShape {
218 fn from(shape: settings::CursorShape) -> Self {
219 match shape {
220 settings::CursorShape::Bar => CursorShape::Bar,
221 settings::CursorShape::Block => CursorShape::Block,
222 settings::CursorShape::Underline => CursorShape::Underline,
223 settings::CursorShape::Hollow => CursorShape::Hollow,
224 }
225 }
226}
227
228#[derive(Clone, Debug)]
229struct SelectionSet {
230 line_mode: bool,
231 cursor_shape: CursorShape,
232 selections: Arc<[Selection<Anchor>]>,
233 lamport_timestamp: clock::Lamport,
234}
235
236/// A diagnostic associated with a certain range of a buffer.
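///
/// A hedged sketch of inspecting one (the field names are from this struct; the
/// surrounding logic is illustrative):
///
/// ```ignore
/// if diagnostic.severity == DiagnosticSeverity::ERROR && diagnostic.is_primary {
///     // Surface this entry prominently in the UI.
/// }
/// ```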
237#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
238pub struct Diagnostic {
239 /// The name of the service that produced this diagnostic.
240 pub source: Option<String>,
241 /// A machine-readable code that identifies this diagnostic.
242 pub code: Option<NumberOrString>,
243 pub code_description: Option<lsp::Uri>,
244 /// Whether this diagnostic is a hint, warning, or error.
245 pub severity: DiagnosticSeverity,
246 /// The human-readable message associated with this diagnostic.
247 pub message: String,
    /// The human-readable message, in Markdown format, if available.
249 pub markdown: Option<String>,
250 /// An id that identifies the group to which this diagnostic belongs.
251 ///
252 /// When a language server produces a diagnostic with
253 /// one or more associated diagnostics, those diagnostics are all
254 /// assigned a single group ID.
255 pub group_id: usize,
256 /// Whether this diagnostic is the primary diagnostic for its group.
257 ///
258 /// In a given group, the primary diagnostic is the top-level diagnostic
259 /// returned by the language server. The non-primary diagnostics are the
260 /// associated diagnostics.
261 pub is_primary: bool,
262 /// Whether this diagnostic is considered to originate from an analysis of
263 /// files on disk, as opposed to any unsaved buffer contents. This is a
264 /// property of a given diagnostic source, and is configured for a given
265 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
266 /// for the language server.
267 pub is_disk_based: bool,
268 /// Whether this diagnostic marks unnecessary code.
269 pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly separated by their source.
271 pub source_kind: DiagnosticSourceKind,
272 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
273 pub data: Option<Value>,
274 /// Whether to underline the corresponding text range in the editor.
275 pub underline: bool,
276}
277
/// How a diagnostic reached the editor from its source.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    /// The diagnostic was pulled from the language server via a document diagnostics request.
    Pulled,
    /// The diagnostic was pushed by the language server in a publish-diagnostics notification.
    Pushed,
    /// The diagnostic came from some other source.
    Other,
}
284
285/// An operation used to synchronize this buffer with its other replicas.
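///
/// A small illustrative check (not from this crate's tests) for whether an
/// operation is a plain text edit:
///
/// ```ignore
/// let is_text_edit = matches!(op, Operation::Buffer(text::Operation::Edit(_)));
/// ```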
286#[derive(Clone, Debug, PartialEq)]
287pub enum Operation {
288 /// A text operation.
289 Buffer(text::Operation),
290
291 /// An update to the buffer's diagnostics.
292 UpdateDiagnostics {
293 /// The id of the language server that produced the new diagnostics.
294 server_id: LanguageServerId,
295 /// The diagnostics.
296 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
297 /// The buffer's lamport timestamp.
298 lamport_timestamp: clock::Lamport,
299 },
300
301 /// An update to the most recent selections in this buffer.
302 UpdateSelections {
303 /// The selections.
304 selections: Arc<[Selection<Anchor>]>,
305 /// The buffer's lamport timestamp.
306 lamport_timestamp: clock::Lamport,
307 /// Whether the selections are in 'line mode'.
308 line_mode: bool,
309 /// The [`CursorShape`] associated with these selections.
310 cursor_shape: CursorShape,
311 },
312
313 /// An update to the characters that should trigger autocompletion
314 /// for this buffer.
315 UpdateCompletionTriggers {
316 /// The characters that trigger autocompletion.
317 triggers: Vec<String>,
318 /// The buffer's lamport timestamp.
319 lamport_timestamp: clock::Lamport,
320 /// The language server ID.
321 server_id: LanguageServerId,
322 },
323
324 /// An update to the line ending type of this buffer.
325 UpdateLineEnding {
326 /// The line ending type.
327 line_ending: LineEnding,
328 /// The buffer's lamport timestamp.
329 lamport_timestamp: clock::Lamport,
330 },
331}
332
333/// An event that occurs in a buffer.
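///
/// A hedged sketch of reacting to these events from a gpui subscriber (the
/// handler body is illustrative):
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| match event {
///     BufferEvent::Edited => log::debug!("buffer edited"),
///     BufferEvent::Saved => log::debug!("buffer saved"),
///     _ => {}
/// })
/// .detach();
/// ```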
334#[derive(Clone, Debug, PartialEq)]
335pub enum BufferEvent {
336 /// The buffer was changed in a way that must be
337 /// propagated to its other replicas.
338 Operation {
339 operation: Operation,
340 is_local: bool,
341 },
342 /// The buffer was edited.
343 Edited,
344 /// The buffer's `dirty` bit changed.
345 DirtyChanged,
346 /// The buffer was saved.
347 Saved,
348 /// The buffer's file was changed on disk.
349 FileHandleChanged,
350 /// The buffer was reloaded.
351 Reloaded,
    /// The buffer needs to be reloaded.
353 ReloadNeeded,
354 /// The buffer's language was changed.
355 LanguageChanged,
356 /// The buffer's syntax trees were updated.
357 Reparsed,
358 /// The buffer's diagnostics were updated.
359 DiagnosticsUpdated,
360 /// The buffer gained or lost editing capabilities.
361 CapabilityChanged,
362}
363
364/// The file associated with a buffer.
365pub trait File: Send + Sync + Any {
366 /// Returns the [`LocalFile`] associated with this file, if the
367 /// file is local.
368 fn as_local(&self) -> Option<&dyn LocalFile>;
369
370 /// Returns whether this file is local.
371 fn is_local(&self) -> bool {
372 self.as_local().is_some()
373 }
374
375 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
376 /// only available in some states, such as modification time.
377 fn disk_state(&self) -> DiskState;
378
379 /// Returns the path of this file relative to the worktree's root directory.
380 fn path(&self) -> &Arc<RelPath>;
381
382 /// Returns the path of this file relative to the worktree's parent directory (this means it
383 /// includes the name of the worktree's root folder).
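    ///
    /// For example, for a worktree rooted at a folder named `my-project` that
    /// contains `src/lib.rs`, [`File::path`] returns `src/lib.rs` while this
    /// method returns `my-project/src/lib.rs` (the folder name is illustrative).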
384 fn full_path(&self, cx: &App) -> PathBuf;
385
386 /// Returns the path style of this file.
387 fn path_style(&self, cx: &App) -> PathStyle;
388
389 /// Returns the last component of this handle's absolute path. If this handle refers to the root
390 /// of its worktree, then this method will return the name of the worktree itself.
391 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
392
393 /// Returns the id of the worktree to which this file belongs.
394 ///
395 /// This is needed for looking up project-specific settings.
396 fn worktree_id(&self, cx: &App) -> WorktreeId;
397
398 /// Converts this file into a protobuf message.
399 fn to_proto(&self, cx: &App) -> rpc::proto::File;
400
    /// Returns whether Zed considers this to be a private file.
402 fn is_private(&self) -> bool;
403}
404
/// The file's storage status: whether it is stored on disk (`Present`), and if so, when it
/// was last modified. A file that is not stored is either `New` or `Deleted`. The UI
/// distinguishes these two states; for example, the buffer tab does not display a deletion
/// indicator for new files.
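///
/// A sketch of how a caller might branch on it (illustrative only):
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved"),
///     DiskState::Present { mtime } => println!("last modified at {mtime:?}"),
///     DiskState::Deleted => println!("deleted from disk"),
/// }
/// ```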
409#[derive(Copy, Clone, Debug, PartialEq)]
410pub enum DiskState {
411 /// File created in Zed that has not been saved.
412 New,
413 /// File present on the filesystem.
414 Present { mtime: MTime },
415 /// Deleted file that was previously present.
416 Deleted,
417}
418
419impl DiskState {
420 /// Returns the file's last known modification time on disk.
421 pub fn mtime(self) -> Option<MTime> {
422 match self {
423 DiskState::New => None,
424 DiskState::Present { mtime } => Some(mtime),
425 DiskState::Deleted => None,
426 }
427 }
428
429 pub fn exists(&self) -> bool {
430 match self {
431 DiskState::New => false,
432 DiskState::Present { .. } => true,
433 DiskState::Deleted => false,
434 }
435 }
436}
437
438/// The file associated with a buffer, in the case where the file is on the local disk.
439pub trait LocalFile: File {
    /// Returns the absolute path of this file.
441 fn abs_path(&self, cx: &App) -> PathBuf;
442
443 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
444 fn load(&self, cx: &App) -> Task<Result<String>>;
445
446 /// Loads the file's contents from disk.
447 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
448}
449
450/// The auto-indent behavior associated with an editing operation.
451/// For some editing operations, each affected line of text has its
452/// indentation recomputed. For other operations, the entire block
453/// of edited text is adjusted uniformly.
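///
/// A hedged usage sketch for a paste-like edit (the edit call and ranges are
/// illustrative):
///
/// ```ignore
/// buffer.edit(
///     [(range, pasted_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```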
454#[derive(Clone, Debug)]
455pub enum AutoindentMode {
456 /// Indent each line of inserted text.
457 EachLine,
458 /// Apply the same indentation adjustment to all of the lines
459 /// in a given insertion.
460 Block {
461 /// The original indentation column of the first line of each
462 /// insertion, if it has been copied.
463 ///
464 /// Knowing this makes it possible to preserve the relative indentation
465 /// of every line in the insertion from when it was copied.
466 ///
        /// If the original indent column is `a` and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// is shifted by the same delta, `b - a`.
470 original_indent_columns: Vec<Option<u32>>,
471 },
472}
473
474#[derive(Clone)]
475struct AutoindentRequest {
476 before_edit: BufferSnapshot,
477 entries: Vec<AutoindentRequestEntry>,
478 is_block_mode: bool,
479 ignore_empty_lines: bool,
480}
481
482#[derive(Debug, Clone)]
483struct AutoindentRequestEntry {
484 /// A range of the buffer whose indentation should be adjusted.
485 range: Range<Anchor>,
486 /// Whether or not these lines should be considered brand new, for the
487 /// purpose of auto-indent. When text is not new, its indentation will
488 /// only be adjusted if the suggested indentation level has *changed*
489 /// since the edit was made.
490 first_line_is_new: bool,
491 indent_size: IndentSize,
492 original_indent_column: Option<u32>,
493}
494
495#[derive(Debug)]
496struct IndentSuggestion {
497 basis_row: u32,
498 delta: Ordering,
499 within_error: bool,
500}
501
502struct BufferChunkHighlights<'a> {
503 captures: SyntaxMapCaptures<'a>,
504 next_capture: Option<SyntaxMapCapture<'a>>,
505 stack: Vec<(usize, HighlightId)>,
506 highlight_maps: Vec<HighlightMap>,
507}
508
509/// An iterator that yields chunks of a buffer's text, along with their
510/// syntax highlights and diagnostic status.
511pub struct BufferChunks<'a> {
512 buffer_snapshot: Option<&'a BufferSnapshot>,
513 range: Range<usize>,
514 chunks: text::Chunks<'a>,
515 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
516 error_depth: usize,
517 warning_depth: usize,
518 information_depth: usize,
519 hint_depth: usize,
520 unnecessary_depth: usize,
521 underline: bool,
522 highlights: Option<BufferChunkHighlights<'a>>,
523}
524
525/// A chunk of a buffer's text, along with its syntax highlight and
526/// diagnostic status.
527#[derive(Clone, Debug, Default)]
528pub struct Chunk<'a> {
529 /// The text of the chunk.
530 pub text: &'a str,
531 /// The syntax highlighting style of the chunk.
532 pub syntax_highlight_id: Option<HighlightId>,
533 /// The highlight style that has been applied to this chunk in
534 /// the editor.
535 pub highlight_style: Option<HighlightStyle>,
536 /// The severity of diagnostic associated with this chunk, if any.
537 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
542 /// Whether this chunk of text is marked as unnecessary.
543 pub is_unnecessary: bool,
544 /// Whether this chunk of text was originally a tab character.
545 pub is_tab: bool,
546 /// Whether this chunk of text was originally an inlay.
547 pub is_inlay: bool,
548 /// Whether to underline the corresponding text range in the editor.
549 pub underline: bool,
550}
551
552/// A set of edits to a given version of a buffer, computed asynchronously.
553#[derive(Debug)]
554pub struct Diff {
555 pub base_version: clock::Global,
556 pub line_ending: LineEnding,
557 pub edits: Vec<(Range<usize>, Arc<str>)>,
558}
559
560#[derive(Debug, Clone, Copy)]
561pub(crate) struct DiagnosticEndpoint {
562 offset: usize,
563 is_start: bool,
564 underline: bool,
565 severity: DiagnosticSeverity,
566 is_unnecessary: bool,
567}
568
569/// A class of characters, used for characterizing a run of text.
570#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
571pub enum CharKind {
572 /// Whitespace.
573 Whitespace,
574 /// Punctuation.
575 Punctuation,
576 /// Word.
577 Word,
578}
579
580/// Context for character classification within a specific scope.
581#[derive(Copy, Clone, Eq, PartialEq, Debug)]
582pub enum CharScopeContext {
583 /// Character classification for completion queries.
584 ///
585 /// This context treats certain characters as word constituents that would
586 /// normally be considered punctuation, such as '-' in Tailwind classes
587 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
588 Completion,
589 /// Character classification for linked edits.
590 ///
591 /// This context handles characters that should be treated as part of
592 /// identifiers during linked editing operations, such as '.' in JSX
593 /// component names like `<Animated.View>`.
594 LinkedEdit,
595}
596
/// A runnable is a set of data about a buffer region that can be resolved into a task.
598pub struct Runnable {
599 pub tags: SmallVec<[RunnableTag; 1]>,
600 pub language: Arc<Language>,
601 pub buffer: BufferId,
602}
603
604#[derive(Default, Clone, Debug)]
605pub struct HighlightedText {
606 pub text: SharedString,
607 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
608}
609
610#[derive(Default, Debug)]
611struct HighlightedTextBuilder {
612 pub text: String,
613 highlights: Vec<(Range<usize>, HighlightStyle)>,
614}
615
616impl HighlightedText {
617 pub fn from_buffer_range<T: ToOffset>(
618 range: Range<T>,
619 snapshot: &text::BufferSnapshot,
620 syntax_snapshot: &SyntaxSnapshot,
621 override_style: Option<HighlightStyle>,
622 syntax_theme: &SyntaxTheme,
623 ) -> Self {
624 let mut highlighted_text = HighlightedTextBuilder::default();
625 highlighted_text.add_text_from_buffer_range(
626 range,
627 snapshot,
628 syntax_snapshot,
629 override_style,
630 syntax_theme,
631 );
632 highlighted_text.build()
633 }
634
635 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
636 gpui::StyledText::new(self.text.clone())
637 .with_default_highlights(default_style, self.highlights.iter().cloned())
638 }
639
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within that whitespace, along with a boolean indicating whether any
    /// lines follow it.
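    ///
    /// A minimal sketch (names are illustrative):
    ///
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// if has_more_lines {
    ///     // Indicate truncation in the UI.
    /// }
    /// ```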
642 pub fn first_line_preview(self) -> (Self, bool) {
643 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
644 let first_line = &self.text[..newline_ix];
645
646 // Trim leading whitespace, unless an edit starts prior to it.
647 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
648 if let Some((first_highlight_range, _)) = self.highlights.first() {
649 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
650 }
651
652 let preview_text = &first_line[preview_start_ix..];
653 let preview_highlights = self
654 .highlights
655 .into_iter()
656 .skip_while(|(range, _)| range.end <= preview_start_ix)
657 .take_while(|(range, _)| range.start < newline_ix)
658 .filter_map(|(mut range, highlight)| {
659 range.start = range.start.saturating_sub(preview_start_ix);
660 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
661 if range.is_empty() {
662 None
663 } else {
664 Some((range, highlight))
665 }
666 });
667
668 let preview = Self {
669 text: SharedString::new(preview_text),
670 highlights: preview_highlights.collect(),
671 };
672
673 (preview, self.text.len() > newline_ix)
674 }
675}
676
677impl HighlightedTextBuilder {
678 pub fn build(self) -> HighlightedText {
679 HighlightedText {
680 text: self.text.into(),
681 highlights: self.highlights,
682 }
683 }
684
685 pub fn add_text_from_buffer_range<T: ToOffset>(
686 &mut self,
687 range: Range<T>,
688 snapshot: &text::BufferSnapshot,
689 syntax_snapshot: &SyntaxSnapshot,
690 override_style: Option<HighlightStyle>,
691 syntax_theme: &SyntaxTheme,
692 ) {
693 let range = range.to_offset(snapshot);
694 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
695 let start = self.text.len();
696 self.text.push_str(chunk.text);
697 let end = self.text.len();
698
699 if let Some(highlight_style) = chunk
700 .syntax_highlight_id
701 .and_then(|id| id.style(syntax_theme))
702 {
703 let highlight_style = override_style.map_or(highlight_style, |override_style| {
704 highlight_style.highlight(override_style)
705 });
706 self.highlights.push((start..end, highlight_style));
707 } else if let Some(override_style) = override_style {
708 self.highlights.push((start..end, override_style));
709 }
710 }
711 }
712
713 fn highlighted_chunks<'a>(
714 range: Range<usize>,
715 snapshot: &'a text::BufferSnapshot,
716 syntax_snapshot: &'a SyntaxSnapshot,
717 ) -> BufferChunks<'a> {
718 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
719 grammar
720 .highlights_config
721 .as_ref()
722 .map(|config| &config.query)
723 });
724
725 let highlight_maps = captures
726 .grammars()
727 .iter()
728 .map(|grammar| grammar.highlight_map())
729 .collect();
730
731 BufferChunks::new(
732 snapshot.as_rope(),
733 range,
734 Some((captures, highlight_maps)),
735 false,
736 None,
737 )
738 }
739}
740
741#[derive(Clone)]
742pub struct EditPreview {
743 old_snapshot: text::BufferSnapshot,
744 applied_edits_snapshot: text::BufferSnapshot,
745 syntax_snapshot: SyntaxSnapshot,
746}
747
748impl EditPreview {
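    /// Renders the given edits as a unified diff between this preview's base text
    /// and the text with the edits applied, including a few lines of surrounding
    /// context. Returns `None` if `edits` is empty.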
749 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
750 let (first, _) = edits.first()?;
751 let (last, _) = edits.last()?;
752
753 let start = first.start.to_point(&self.old_snapshot);
754 let old_end = last.end.to_point(&self.old_snapshot);
755 let new_end = last
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_point(&self.applied_edits_snapshot);
759
760 let start = Point::new(start.row.saturating_sub(3), 0);
761 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
762 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
763
764 Some(unified_diff(
765 &self
766 .old_snapshot
767 .text_for_range(start..old_end)
768 .collect::<String>(),
769 &self
770 .applied_edits_snapshot
771 .text_for_range(start..new_end)
772 .collect::<String>(),
773 ))
774 }
775
776 pub fn highlight_edits(
777 &self,
778 current_snapshot: &BufferSnapshot,
779 edits: &[(Range<Anchor>, impl AsRef<str>)],
780 include_deletions: bool,
781 cx: &App,
782 ) -> HighlightedText {
783 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
784 return HighlightedText::default();
785 };
786
787 let mut highlighted_text = HighlightedTextBuilder::default();
788
789 let visible_range_in_preview_snapshot =
790 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
791 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
792
793 let insertion_highlight_style = HighlightStyle {
794 background_color: Some(cx.theme().status().created_background),
795 ..Default::default()
796 };
797 let deletion_highlight_style = HighlightStyle {
798 background_color: Some(cx.theme().status().deleted_background),
799 ..Default::default()
800 };
801 let syntax_theme = cx.theme().syntax();
802
803 for (range, edit_text) in edits {
804 let edit_new_end_in_preview_snapshot = range
805 .end
806 .bias_right(&self.old_snapshot)
807 .to_offset(&self.applied_edits_snapshot);
808 let edit_start_in_preview_snapshot =
809 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
810
811 let unchanged_range_in_preview_snapshot =
812 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
813 if !unchanged_range_in_preview_snapshot.is_empty() {
814 highlighted_text.add_text_from_buffer_range(
815 unchanged_range_in_preview_snapshot,
816 &self.applied_edits_snapshot,
817 &self.syntax_snapshot,
818 None,
819 syntax_theme,
820 );
821 }
822
823 let range_in_current_snapshot = range.to_offset(current_snapshot);
824 if include_deletions && !range_in_current_snapshot.is_empty() {
825 highlighted_text.add_text_from_buffer_range(
826 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
829 Some(deletion_highlight_style),
830 syntax_theme,
831 );
832 }
833
834 if !edit_text.as_ref().is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
837 &self.applied_edits_snapshot,
838 &self.syntax_snapshot,
839 Some(insertion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
845 }
846
847 highlighted_text.add_text_from_buffer_range(
848 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
849 &self.applied_edits_snapshot,
850 &self.syntax_snapshot,
851 None,
852 syntax_theme,
853 );
854
855 highlighted_text.build()
856 }
857
858 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
859 cx.new(|cx| {
860 let mut buffer = Buffer::local_normalized(
861 self.applied_edits_snapshot.as_rope().clone(),
862 self.applied_edits_snapshot.line_ending(),
863 cx,
864 );
865 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
866 buffer
867 })
868 }
869
870 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
871 let (first, _) = edits.first()?;
872 let (last, _) = edits.last()?;
873
874 let start = first
875 .start
876 .bias_left(&self.old_snapshot)
877 .to_point(&self.applied_edits_snapshot);
878 let end = last
879 .end
880 .bias_right(&self.old_snapshot)
881 .to_point(&self.applied_edits_snapshot);
882
883 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
884 let range = Point::new(start.row, 0)
885 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
886
887 Some(range)
888 }
889}
890
891#[derive(Clone, Debug, PartialEq, Eq)]
892pub struct BracketMatch<T> {
893 pub open_range: Range<T>,
894 pub close_range: Range<T>,
895 pub newline_only: bool,
896 pub syntax_layer_depth: usize,
897 pub color_index: Option<usize>,
898}
899
900impl<T> BracketMatch<T> {
901 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
902 (self.open_range, self.close_range)
903 }
904}
905
906impl Buffer {
907 /// Create a new buffer with the given base text.
908 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
909 Self::build(
910 TextBuffer::new(
911 ReplicaId::LOCAL,
912 cx.entity_id().as_non_zero_u64().into(),
913 base_text.into(),
914 ),
915 None,
916 Capability::ReadWrite,
917 )
918 }
919
920 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
921 pub fn local_normalized(
922 base_text_normalized: Rope,
923 line_ending: LineEnding,
924 cx: &Context<Self>,
925 ) -> Self {
926 Self::build(
927 TextBuffer::new_normalized(
928 ReplicaId::LOCAL,
929 cx.entity_id().as_non_zero_u64().into(),
930 line_ending,
931 base_text_normalized,
932 ),
933 None,
934 Capability::ReadWrite,
935 )
936 }
937
938 /// Create a new buffer that is a replica of a remote buffer.
939 pub fn remote(
940 remote_id: BufferId,
941 replica_id: ReplicaId,
942 capability: Capability,
943 base_text: impl Into<String>,
944 ) -> Self {
945 Self::build(
946 TextBuffer::new(replica_id, remote_id, base_text.into()),
947 None,
948 capability,
949 )
950 }
951
952 /// Create a new buffer that is a replica of a remote buffer, populating its
953 /// state from the given protobuf message.
954 pub fn from_proto(
955 replica_id: ReplicaId,
956 capability: Capability,
957 message: proto::BufferState,
958 file: Option<Arc<dyn File>>,
959 ) -> Result<Self> {
960 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
961 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
962 let mut this = Self::build(buffer, file, capability);
963 this.text.set_line_ending(proto::deserialize_line_ending(
964 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
965 ));
966 this.saved_version = proto::deserialize_version(&message.saved_version);
967 this.saved_mtime = message.saved_mtime.map(|time| time.into());
968 Ok(this)
969 }
970
971 /// Serialize the buffer's state to a protobuf message.
972 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
973 proto::BufferState {
974 id: self.remote_id().into(),
975 file: self.file.as_ref().map(|f| f.to_proto(cx)),
976 base_text: self.base_text().to_string(),
977 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
978 saved_version: proto::serialize_version(&self.saved_version),
979 saved_mtime: self.saved_mtime.map(|time| time.into()),
980 }
981 }
982
983 /// Serialize as protobufs all of the changes to the buffer since the given version.
984 pub fn serialize_ops(
985 &self,
986 since: Option<clock::Global>,
987 cx: &App,
988 ) -> Task<Vec<proto::Operation>> {
989 let mut operations = Vec::new();
990 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
991
992 operations.extend(self.remote_selections.iter().map(|(_, set)| {
993 proto::serialize_operation(&Operation::UpdateSelections {
994 selections: set.selections.clone(),
995 lamport_timestamp: set.lamport_timestamp,
996 line_mode: set.line_mode,
997 cursor_shape: set.cursor_shape,
998 })
999 }));
1000
1001 for (server_id, diagnostics) in &self.diagnostics {
1002 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1003 lamport_timestamp: self.diagnostics_timestamp,
1004 server_id: *server_id,
1005 diagnostics: diagnostics.iter().cloned().collect(),
1006 }));
1007 }
1008
1009 for (server_id, completions) in &self.completion_triggers_per_language_server {
1010 operations.push(proto::serialize_operation(
1011 &Operation::UpdateCompletionTriggers {
1012 triggers: completions.iter().cloned().collect(),
1013 lamport_timestamp: self.completion_triggers_timestamp,
1014 server_id: *server_id,
1015 },
1016 ));
1017 }
1018
1019 let text_operations = self.text.operations().clone();
1020 cx.background_spawn(async move {
1021 let since = since.unwrap_or_default();
1022 operations.extend(
1023 text_operations
1024 .iter()
1025 .filter(|(_, op)| !since.observed(op.timestamp()))
1026 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1027 );
1028 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1029 operations
1030 })
1031 }
1032
1033 /// Assign a language to the buffer, returning the buffer.
1034 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1035 self.set_language_async(Some(language), cx);
1036 self
1037 }
1038
1039 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1040 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1041 self.set_language(Some(language), cx);
1042 self
1043 }
1044
1045 /// Returns the [`Capability`] of this buffer.
1046 pub fn capability(&self) -> Capability {
1047 self.capability
1048 }
1049
1050 /// Whether this buffer can only be read.
1051 pub fn read_only(&self) -> bool {
1052 self.capability == Capability::ReadOnly
1053 }
1054
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1056 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1057 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1058 let snapshot = buffer.snapshot();
1059 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1060 let tree_sitter_data = TreeSitterData::new(snapshot);
1061 Self {
1062 saved_mtime,
1063 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1064 saved_version: buffer.version(),
1065 preview_version: buffer.version(),
1066 reload_task: None,
1067 transaction_depth: 0,
1068 was_dirty_before_starting_transaction: None,
1069 has_unsaved_edits: Cell::new((buffer.version(), false)),
1070 text: buffer,
1071 branch_state: None,
1072 file,
1073 capability,
1074 syntax_map,
1075 reparse: None,
1076 non_text_state_update_count: 0,
1077 sync_parse_timeout: Duration::from_millis(1),
1078 parse_status: watch::channel(ParseStatus::Idle),
1079 autoindent_requests: Default::default(),
1080 wait_for_autoindent_txs: Default::default(),
1081 pending_autoindent: Default::default(),
1082 language: None,
1083 remote_selections: Default::default(),
1084 diagnostics: Default::default(),
1085 diagnostics_timestamp: Lamport::MIN,
1086 completion_triggers: Default::default(),
1087 completion_triggers_per_language_server: Default::default(),
1088 completion_triggers_timestamp: Lamport::MIN,
1089 deferred_ops: OperationQueue::new(),
1090 has_conflict: false,
1091 change_bits: Default::default(),
1092 _subscriptions: Vec::new(),
1093 }
1094 }
1095
1096 pub fn build_snapshot(
1097 text: Rope,
1098 language: Option<Arc<Language>>,
1099 language_registry: Option<Arc<LanguageRegistry>>,
1100 cx: &mut App,
1101 ) -> impl Future<Output = BufferSnapshot> + use<> {
1102 let entity_id = cx.reserve_entity::<Self>().entity_id();
1103 let buffer_id = entity_id.as_non_zero_u64().into();
1104 async move {
1105 let text =
1106 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1107 .snapshot();
1108 let mut syntax = SyntaxMap::new(&text).snapshot();
1109 if let Some(language) = language.clone() {
1110 let language_registry = language_registry.clone();
1111 syntax.reparse(&text, language_registry, language);
1112 }
1113 let tree_sitter_data = TreeSitterData::new(text.clone());
1114 BufferSnapshot {
1115 text,
1116 syntax,
1117 file: None,
1118 diagnostics: Default::default(),
1119 remote_selections: Default::default(),
1120 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1121 language,
1122 non_text_state_update_count: 0,
1123 }
1124 }
1125 }
1126
1127 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1128 let entity_id = cx.reserve_entity::<Self>().entity_id();
1129 let buffer_id = entity_id.as_non_zero_u64().into();
1130 let text = TextBuffer::new_normalized(
1131 ReplicaId::LOCAL,
1132 buffer_id,
1133 Default::default(),
1134 Rope::new(),
1135 )
1136 .snapshot();
1137 let syntax = SyntaxMap::new(&text).snapshot();
1138 let tree_sitter_data = TreeSitterData::new(text.clone());
1139 BufferSnapshot {
1140 text,
1141 syntax,
1142 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1143 file: None,
1144 diagnostics: Default::default(),
1145 remote_selections: Default::default(),
1146 language: None,
1147 non_text_state_update_count: 0,
1148 }
1149 }
1150
1151 #[cfg(any(test, feature = "test-support"))]
1152 pub fn build_snapshot_sync(
1153 text: Rope,
1154 language: Option<Arc<Language>>,
1155 language_registry: Option<Arc<LanguageRegistry>>,
1156 cx: &mut App,
1157 ) -> BufferSnapshot {
1158 let entity_id = cx.reserve_entity::<Self>().entity_id();
1159 let buffer_id = entity_id.as_non_zero_u64().into();
1160 let text =
1161 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1162 .snapshot();
1163 let mut syntax = SyntaxMap::new(&text).snapshot();
1164 if let Some(language) = language.clone() {
1165 syntax.reparse(&text, language_registry, language);
1166 }
1167 let tree_sitter_data = TreeSitterData::new(text.clone());
1168 BufferSnapshot {
1169 text,
1170 syntax,
1171 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1172 file: None,
1173 diagnostics: Default::default(),
1174 remote_selections: Default::default(),
1175 language,
1176 non_text_state_update_count: 0,
1177 }
1178 }
1179
1180 /// Retrieve a snapshot of the buffer's current state. This is computationally
1181 /// cheap, and allows reading from the buffer on a background thread.
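    ///
    /// For example (a sketch, assuming a `buffer: Entity<Buffer>` handle):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```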
1182 pub fn snapshot(&self) -> BufferSnapshot {
1183 let text = self.text.snapshot();
1184 let mut syntax_map = self.syntax_map.lock();
1185 syntax_map.interpolate(&text);
1186 let syntax = syntax_map.snapshot();
1187
1188 BufferSnapshot {
1189 text,
1190 syntax,
1191 tree_sitter_data: self.tree_sitter_data.clone(),
1192 file: self.file.clone(),
1193 remote_selections: self.remote_selections.clone(),
1194 diagnostics: self.diagnostics.clone(),
1195 language: self.language.clone(),
1196 non_text_state_update_count: self.non_text_state_update_count,
1197 }
1198 }
1199
1200 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1201 let this = cx.entity();
1202 cx.new(|cx| {
1203 let mut branch = Self {
1204 branch_state: Some(BufferBranchState {
1205 base_buffer: this.clone(),
1206 merged_operations: Default::default(),
1207 }),
1208 language: self.language.clone(),
1209 has_conflict: self.has_conflict,
1210 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1211 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1212 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1213 };
1214 if let Some(language_registry) = self.language_registry() {
1215 branch.set_language_registry(language_registry);
1216 }
1217
1218 // Reparse the branch buffer so that we get syntax highlighting immediately.
1219 branch.reparse(cx, true);
1220
1221 branch
1222 })
1223 }
1224
1225 pub fn preview_edits(
1226 &self,
1227 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1228 cx: &App,
1229 ) -> Task<EditPreview> {
1230 let registry = self.language_registry();
1231 let language = self.language().cloned();
1232 let old_snapshot = self.text.snapshot();
1233 let mut branch_buffer = self.text.branch();
1234 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1235 cx.background_spawn(async move {
1236 if !edits.is_empty() {
1237 if let Some(language) = language.clone() {
1238 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1239 }
1240
1241 branch_buffer.edit(edits.iter().cloned());
1242 let snapshot = branch_buffer.snapshot();
1243 syntax_snapshot.interpolate(&snapshot);
1244
1245 if let Some(language) = language {
1246 syntax_snapshot.reparse(&snapshot, registry, language);
1247 }
1248 }
1249 EditPreview {
1250 old_snapshot,
1251 applied_edits_snapshot: branch_buffer.snapshot(),
1252 syntax_snapshot,
1253 }
1254 })
1255 }
1256
1257 /// Applies all of the changes in this buffer that intersect any of the
1258 /// given `ranges` to its base buffer.
1259 ///
1260 /// If `ranges` is empty, then all changes will be applied. This buffer must
1261 /// be a branch buffer to call this method.
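    ///
    /// A hedged usage sketch (the entity handle and byte range are illustrative):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // Apply only the branch edits intersecting bytes 0..100; pass an
    ///     // empty Vec to apply every change.
    ///     branch.merge_into_base(vec![0..100], cx);
    /// });
    /// ```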
1262 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1263 let Some(base_buffer) = self.base_buffer() else {
1264 debug_panic!("not a branch buffer");
1265 return;
1266 };
1267
1268 let mut ranges = if ranges.is_empty() {
1269 &[0..usize::MAX]
1270 } else {
1271 ranges.as_slice()
1272 }
1273 .iter()
1274 .peekable();
1275
1276 let mut edits = Vec::new();
1277 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1278 let mut is_included = false;
1279 while let Some(range) = ranges.peek() {
1280 if range.end < edit.new.start {
1281 ranges.next().unwrap();
1282 } else {
1283 if range.start <= edit.new.end {
1284 is_included = true;
1285 }
1286 break;
1287 }
1288 }
1289
1290 if is_included {
1291 edits.push((
1292 edit.old.clone(),
1293 self.text_for_range(edit.new.clone()).collect::<String>(),
1294 ));
1295 }
1296 }
1297
1298 let operation = base_buffer.update(cx, |base_buffer, cx| {
1299 // cx.emit(BufferEvent::DiffBaseChanged);
1300 base_buffer.edit(edits, None, cx)
1301 });
1302
1303 if let Some(operation) = operation
1304 && let Some(BufferBranchState {
1305 merged_operations, ..
1306 }) = &mut self.branch_state
1307 {
1308 merged_operations.push(operation);
1309 }
1310 }
1311
1312 fn on_base_buffer_event(
1313 &mut self,
1314 _: Entity<Buffer>,
1315 event: &BufferEvent,
1316 cx: &mut Context<Self>,
1317 ) {
1318 let BufferEvent::Operation { operation, .. } = event else {
1319 return;
1320 };
1321 let Some(BufferBranchState {
1322 merged_operations, ..
1323 }) = &mut self.branch_state
1324 else {
1325 return;
1326 };
1327
1328 let mut operation_to_undo = None;
1329 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1330 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1331 {
1332 merged_operations.remove(ix);
1333 operation_to_undo = Some(operation.timestamp);
1334 }
1335
1336 self.apply_ops([operation.clone()], cx);
1337
1338 if let Some(timestamp) = operation_to_undo {
1339 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1340 self.undo_operations(counts, cx);
1341 }
1342 }
1343
1344 #[cfg(test)]
1345 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1346 &self.text
1347 }
1348
1349 /// Retrieve a snapshot of the buffer's raw text, without any
1350 /// language-related state like the syntax tree or diagnostics.
1351 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1352 self.text.snapshot()
1353 }
1354
1355 /// The file associated with the buffer, if any.
1356 pub fn file(&self) -> Option<&Arc<dyn File>> {
1357 self.file.as_ref()
1358 }
1359
1360 /// The version of the buffer that was last saved or reloaded from disk.
1361 pub fn saved_version(&self) -> &clock::Global {
1362 &self.saved_version
1363 }
1364
1365 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1366 pub fn saved_mtime(&self) -> Option<MTime> {
1367 self.saved_mtime
1368 }
1369
1370 /// Assign a language to the buffer.
1371 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1372 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1373 }
1374
1375 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1376 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1377 self.set_language_(language, true, cx);
1378 }
1379
1380 fn set_language_(
1381 &mut self,
1382 language: Option<Arc<Language>>,
1383 may_block: bool,
1384 cx: &mut Context<Self>,
1385 ) {
1386 self.non_text_state_update_count += 1;
1387 self.syntax_map.lock().clear(&self.text);
1388 self.language = language;
1389 self.was_changed();
1390 self.reparse(cx, may_block);
1391 cx.emit(BufferEvent::LanguageChanged);
1392 }
1393
1394 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1395 /// other languages if parts of the buffer are written in different languages.
1396 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1397 self.syntax_map
1398 .lock()
1399 .set_language_registry(language_registry);
1400 }
1401
1402 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1403 self.syntax_map.lock().language_registry()
1404 }
1405
1406 /// Assign the line ending type to the buffer.
1407 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1408 self.text.set_line_ending(line_ending);
1409
1410 let lamport_timestamp = self.text.lamport_clock.tick();
1411 self.send_operation(
1412 Operation::UpdateLineEnding {
1413 line_ending,
1414 lamport_timestamp,
1415 },
1416 true,
1417 cx,
1418 );
1419 }
1420
1421 /// Assign the buffer a new [`Capability`].
1422 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1423 if self.capability != capability {
1424 self.capability = capability;
1425 cx.emit(BufferEvent::CapabilityChanged)
1426 }
1427 }
1428
1429 /// This method is called to signal that the buffer has been saved.
1430 pub fn did_save(
1431 &mut self,
1432 version: clock::Global,
1433 mtime: Option<MTime>,
1434 cx: &mut Context<Self>,
1435 ) {
1436 self.saved_version = version.clone();
1437 self.has_unsaved_edits.set((version, false));
1438 self.has_conflict = false;
1439 self.saved_mtime = mtime;
1440 self.was_changed();
1441 cx.emit(BufferEvent::Saved);
1442 cx.notify();
1443 }
1444
1445 /// Reloads the contents of the buffer from disk.
1446 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1447 let (tx, rx) = futures::channel::oneshot::channel();
1448 let prev_version = self.text.version();
1449 self.reload_task = Some(cx.spawn(async move |this, cx| {
1450 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1451 let file = this.file.as_ref()?.as_local()?;
1452
1453 Some((file.disk_state().mtime(), file.load(cx)))
1454 })?
1455 else {
1456 return Ok(());
1457 };
1458
1459 let new_text = new_text.await?;
1460 let diff = this
1461 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1462 .await;
1463 this.update(cx, |this, cx| {
1464 if this.version() == diff.base_version {
1465 this.finalize_last_transaction();
1466 this.apply_diff(diff, cx);
1467 tx.send(this.finalize_last_transaction().cloned()).ok();
1468 this.has_conflict = false;
1469 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1470 } else {
1471 if !diff.edits.is_empty()
1472 || this
1473 .edits_since::<usize>(&diff.base_version)
1474 .next()
1475 .is_some()
1476 {
1477 this.has_conflict = true;
1478 }
1479
1480 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1481 }
1482
1483 this.reload_task.take();
1484 })
1485 }));
1486 rx
1487 }
1488
1489 /// This method is called to signal that the buffer has been reloaded.
1490 pub fn did_reload(
1491 &mut self,
1492 version: clock::Global,
1493 line_ending: LineEnding,
1494 mtime: Option<MTime>,
1495 cx: &mut Context<Self>,
1496 ) {
1497 self.saved_version = version;
1498 self.has_unsaved_edits
1499 .set((self.saved_version.clone(), false));
1500 self.text.set_line_ending(line_ending);
1501 self.saved_mtime = mtime;
1502 cx.emit(BufferEvent::Reloaded);
1503 cx.notify();
1504 }
1505
1506 /// Updates the [`File`] backing this buffer. This should be called when
1507 /// the file has changed or has been deleted.
1508 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1509 let was_dirty = self.is_dirty();
1510 let mut file_changed = false;
1511
1512 if let Some(old_file) = self.file.as_ref() {
1513 if new_file.path() != old_file.path() {
1514 file_changed = true;
1515 }
1516
1517 let old_state = old_file.disk_state();
1518 let new_state = new_file.disk_state();
1519 if old_state != new_state {
1520 file_changed = true;
1521 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1522 cx.emit(BufferEvent::ReloadNeeded)
1523 }
1524 }
1525 } else {
1526 file_changed = true;
1527 };
1528
1529 self.file = Some(new_file);
1530 if file_changed {
1531 self.was_changed();
1532 self.non_text_state_update_count += 1;
1533 if was_dirty != self.is_dirty() {
1534 cx.emit(BufferEvent::DirtyChanged);
1535 }
1536 cx.emit(BufferEvent::FileHandleChanged);
1537 cx.notify();
1538 }
1539 }
1540
1541 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1542 Some(self.branch_state.as_ref()?.base_buffer.clone())
1543 }
1544
1545 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1546 pub fn language(&self) -> Option<&Arc<Language>> {
1547 self.language.as_ref()
1548 }
1549
1550 /// Returns the [`Language`] at the given location.
1551 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1552 let offset = position.to_offset(self);
1553 let mut is_first = true;
1554 let start_anchor = self.anchor_before(offset);
1555 let end_anchor = self.anchor_after(offset);
1556 self.syntax_map
1557 .lock()
1558 .layers_for_range(offset..offset, &self.text, false)
1559 .filter(|layer| {
1560 if is_first {
1561 is_first = false;
1562 return true;
1563 }
1564
1565 layer
1566 .included_sub_ranges
1567 .map(|sub_ranges| {
1568 sub_ranges.iter().any(|sub_range| {
1569 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1570 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1571 !is_before_start && !is_after_end
1572 })
1573 })
1574 .unwrap_or(true)
1575 })
1576 .last()
1577 .map(|info| info.language.clone())
1578 .or_else(|| self.language.clone())
1579 }
1580
1581 /// Returns each [`Language`] for the active syntax layers at the given location.
1582 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1583 let offset = position.to_offset(self);
1584 let mut languages: Vec<Arc<Language>> = self
1585 .syntax_map
1586 .lock()
1587 .layers_for_range(offset..offset, &self.text, false)
1588 .map(|info| info.language.clone())
1589 .collect();
1590
1591 if languages.is_empty()
1592 && let Some(buffer_language) = self.language()
1593 {
1594 languages.push(buffer_language.clone());
1595 }
1596
1597 languages
1598 }
1599
1600 /// An integer version number that accounts for all updates besides
1601 /// the buffer's text itself (which is versioned via a version vector).
1602 pub fn non_text_state_update_count(&self) -> usize {
1603 self.non_text_state_update_count
1604 }
1605
1606 /// Whether the buffer is being parsed in the background.
1607 #[cfg(any(test, feature = "test-support"))]
1608 pub fn is_parsing(&self) -> bool {
1609 self.reparse.is_some()
1610 }
1611
1612 /// Indicates whether the buffer contains any regions that may be
1613 /// written in a language that hasn't been loaded yet.
1614 pub fn contains_unknown_injections(&self) -> bool {
1615 self.syntax_map.lock().contains_unknown_injections()
1616 }
1617
1618 #[cfg(any(test, feature = "test-support"))]
1619 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1620 self.sync_parse_timeout = timeout;
1621 }
1622
1623 /// Called after an edit to synchronize the buffer's main parse tree with
1624 /// the buffer's new underlying state.
1625 ///
1626 /// Locks the syntax map and interpolates the edits since the last reparse
1627 /// into the foreground syntax tree.
1628 ///
1629 /// Then takes a stable snapshot of the syntax map before unlocking it.
1630 /// The snapshot with the interpolated edits is sent to a background thread,
1631 /// where we ask Tree-sitter to perform an incremental parse.
1632 ///
    /// Meanwhile, if `may_block` is true, we block the main thread in the
    /// foreground for up to 1ms, waiting for the parse to complete. If it
    /// completes within that window, we proceed synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for the parse to finish, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the newly parsed state.
1641 ///
1642 /// If the buffer or grammar changed since the start of the background parse,
1643 /// initiate an additional reparse recursively. To avoid concurrent parses
1644 /// for the same buffer, we only initiate a new parse if we are not already
1645 /// parsing in the background.
1646 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1647 if self.reparse.is_some() {
1648 return;
1649 }
        let Some(language) = self.language.clone() else {
            return;
        };
1655
1656 let text = self.text_snapshot();
1657 let parsed_version = self.version();
1658
1659 let mut syntax_map = self.syntax_map.lock();
1660 syntax_map.interpolate(&text);
1661 let language_registry = syntax_map.language_registry();
1662 let mut syntax_snapshot = syntax_map.snapshot();
1663 drop(syntax_map);
1664
1665 let parse_task = cx.background_spawn({
1666 let language = language.clone();
1667 let language_registry = language_registry.clone();
1668 async move {
1669 syntax_snapshot.reparse(&text, language_registry, language);
1670 syntax_snapshot
1671 }
1672 });
1673
1674 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1675 if may_block {
1676 match cx
1677 .background_executor()
1678 .block_with_timeout(self.sync_parse_timeout, parse_task)
1679 {
1680 Ok(new_syntax_snapshot) => {
1681 self.did_finish_parsing(new_syntax_snapshot, cx);
1682 self.reparse = None;
1683 }
1684 Err(parse_task) => {
1685 self.reparse = Some(cx.spawn(async move |this, cx| {
1686 let new_syntax_map = cx.background_spawn(parse_task).await;
1687 this.update(cx, move |this, cx| {
1688 let grammar_changed = || {
1689 this.language.as_ref().is_none_or(|current_language| {
1690 !Arc::ptr_eq(&language, current_language)
1691 })
1692 };
1693 let language_registry_changed = || {
1694 new_syntax_map.contains_unknown_injections()
1695 && language_registry.is_some_and(|registry| {
1696 registry.version()
1697 != new_syntax_map.language_registry_version()
1698 })
1699 };
1700 let parse_again = this.version.changed_since(&parsed_version)
1701 || language_registry_changed()
1702 || grammar_changed();
1703 this.did_finish_parsing(new_syntax_map, cx);
1704 this.reparse = None;
1705 if parse_again {
1706 this.reparse(cx, false);
1707 }
1708 })
1709 .ok();
1710 }));
1711 }
1712 }
1713 } else {
1714 self.reparse = Some(cx.spawn(async move |this, cx| {
1715 let new_syntax_map = cx.background_spawn(parse_task).await;
1716 this.update(cx, move |this, cx| {
1717 let grammar_changed = || {
1718 this.language.as_ref().is_none_or(|current_language| {
1719 !Arc::ptr_eq(&language, current_language)
1720 })
1721 };
1722 let language_registry_changed = || {
1723 new_syntax_map.contains_unknown_injections()
1724 && language_registry.is_some_and(|registry| {
1725 registry.version() != new_syntax_map.language_registry_version()
1726 })
1727 };
1728 let parse_again = this.version.changed_since(&parsed_version)
1729 || language_registry_changed()
1730 || grammar_changed();
1731 this.did_finish_parsing(new_syntax_map, cx);
1732 this.reparse = None;
1733 if parse_again {
1734 this.reparse(cx, false);
1735 }
1736 })
1737 .ok();
1738 }));
1739 }
1740 }
1741
1742 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1743 self.was_changed();
1744 self.non_text_state_update_count += 1;
1745 self.syntax_map.lock().did_parse(syntax_snapshot);
1746 self.request_autoindent(cx);
1747 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1748 self.tree_sitter_data.lock().clear();
1749 cx.emit(BufferEvent::Reparsed);
1750 cx.notify();
1751 }
1752
1753 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1754 self.parse_status.1.clone()
1755 }
1756
1757 /// Waits until the buffer is no longer parsing.
1758 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1759 let mut parse_status = self.parse_status();
1760 async move {
1761 while *parse_status.borrow() != ParseStatus::Idle {
1762 if parse_status.changed().await.is_err() {
1763 break;
1764 }
1765 }
1766 }
1767 }
1768
1769 /// Assign to the buffer a set of diagnostics created by a given language server.
1770 pub fn update_diagnostics(
1771 &mut self,
1772 server_id: LanguageServerId,
1773 diagnostics: DiagnosticSet,
1774 cx: &mut Context<Self>,
1775 ) {
1776 let lamport_timestamp = self.text.lamport_clock.tick();
1777 let op = Operation::UpdateDiagnostics {
1778 server_id,
1779 diagnostics: diagnostics.iter().cloned().collect(),
1780 lamport_timestamp,
1781 };
1782
1783 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1784 self.send_operation(op, true, cx);
1785 }
1786
1787 pub fn buffer_diagnostics(
1788 &self,
1789 for_server: Option<LanguageServerId>,
1790 ) -> Vec<&DiagnosticEntry<Anchor>> {
1791 match for_server {
1792 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1793 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1794 Err(_) => Vec::new(),
1795 },
1796 None => self
1797 .diagnostics
1798 .iter()
1799 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1800 .collect(),
1801 }
1802 }
1803
1804 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1805 if let Some(indent_sizes) = self.compute_autoindents() {
1806 let indent_sizes = cx.background_spawn(indent_sizes);
1807 match cx
1808 .background_executor()
1809 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1810 {
1811 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1812 Err(indent_sizes) => {
1813 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1814 let indent_sizes = indent_sizes.await;
1815 this.update(cx, |this, cx| {
1816 this.apply_autoindents(indent_sizes, cx);
1817 })
1818 .ok();
1819 }));
1820 }
1821 }
1822 } else {
1823 self.autoindent_requests.clear();
1824 for tx in self.wait_for_autoindent_txs.drain(..) {
1825 tx.send(()).ok();
1826 }
1827 }
1828 }
1829
1830 fn compute_autoindents(
1831 &self,
1832 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1833 let max_rows_between_yields = 100;
1834 let snapshot = self.snapshot();
1835 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1836 return None;
1837 }
1838
1839 let autoindent_requests = self.autoindent_requests.clone();
1840 Some(async move {
1841 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1842 for request in autoindent_requests {
1843 // Resolve each edited range to its row in the current buffer and in the
1844 // buffer before this batch of edits.
1845 let mut row_ranges = Vec::new();
1846 let mut old_to_new_rows = BTreeMap::new();
1847 let mut language_indent_sizes_by_new_row = Vec::new();
1848 for entry in &request.entries {
1849 let position = entry.range.start;
1850 let new_row = position.to_point(&snapshot).row;
1851 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1852 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1853
1854 if !entry.first_line_is_new {
1855 let old_row = position.to_point(&request.before_edit).row;
1856 old_to_new_rows.insert(old_row, new_row);
1857 }
1858 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1859 }
1860
1861 // Build a map containing the suggested indentation for each of the edited lines
1862 // with respect to the state of the buffer before these edits. This map is keyed
1863 // by the rows for these lines in the current state of the buffer.
1864 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1865 let old_edited_ranges =
1866 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1867 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1868 let mut language_indent_size = IndentSize::default();
1869 for old_edited_range in old_edited_ranges {
1870 let suggestions = request
1871 .before_edit
1872 .suggest_autoindents(old_edited_range.clone())
1873 .into_iter()
1874 .flatten();
1875 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1876 if let Some(suggestion) = suggestion {
1877 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1878
1879 // Find the indent size based on the language for this row.
1880 while let Some((row, size)) = language_indent_sizes.peek() {
1881 if *row > new_row {
1882 break;
1883 }
1884 language_indent_size = *size;
1885 language_indent_sizes.next();
1886 }
1887
1888 let suggested_indent = old_to_new_rows
1889 .get(&suggestion.basis_row)
1890 .and_then(|from_row| {
1891 Some(old_suggestions.get(from_row).copied()?.0)
1892 })
1893 .unwrap_or_else(|| {
1894 request
1895 .before_edit
1896 .indent_size_for_line(suggestion.basis_row)
1897 })
1898 .with_delta(suggestion.delta, language_indent_size);
1899 old_suggestions
1900 .insert(new_row, (suggested_indent, suggestion.within_error));
1901 }
1902 }
1903 yield_now().await;
1904 }
1905
1906 // Compute new suggestions for each line, but only include them in the result
1907 // if they differ from the old suggestion for that line.
1908 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1909 let mut language_indent_size = IndentSize::default();
1910 for (row_range, original_indent_column) in row_ranges {
1911 let new_edited_row_range = if request.is_block_mode {
1912 row_range.start..row_range.start + 1
1913 } else {
1914 row_range.clone()
1915 };
1916
1917 let suggestions = snapshot
1918 .suggest_autoindents(new_edited_row_range.clone())
1919 .into_iter()
1920 .flatten();
1921 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1922 if let Some(suggestion) = suggestion {
1923 // Find the indent size based on the language for this row.
1924 while let Some((row, size)) = language_indent_sizes.peek() {
1925 if *row > new_row {
1926 break;
1927 }
1928 language_indent_size = *size;
1929 language_indent_sizes.next();
1930 }
1931
1932 let suggested_indent = indent_sizes
1933 .get(&suggestion.basis_row)
1934 .copied()
1935 .map(|e| e.0)
1936 .unwrap_or_else(|| {
1937 snapshot.indent_size_for_line(suggestion.basis_row)
1938 })
1939 .with_delta(suggestion.delta, language_indent_size);
1940
1941 if old_suggestions.get(&new_row).is_none_or(
1942 |(old_indentation, was_within_error)| {
1943 suggested_indent != *old_indentation
1944 && (!suggestion.within_error || *was_within_error)
1945 },
1946 ) {
1947 indent_sizes.insert(
1948 new_row,
1949 (suggested_indent, request.ignore_empty_lines),
1950 );
1951 }
1952 }
1953 }
1954
1955 if let (true, Some(original_indent_column)) =
1956 (request.is_block_mode, original_indent_column)
1957 {
1958 let new_indent =
1959 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1960 *indent
1961 } else {
1962 snapshot.indent_size_for_line(row_range.start)
1963 };
1964 let delta = new_indent.len as i64 - original_indent_column as i64;
1965 if delta != 0 {
1966 for row in row_range.skip(1) {
1967 indent_sizes.entry(row).or_insert_with(|| {
1968 let mut size = snapshot.indent_size_for_line(row);
1969 if size.kind == new_indent.kind {
1970 match delta.cmp(&0) {
1971 Ordering::Greater => size.len += delta as u32,
1972 Ordering::Less => {
1973 size.len = size.len.saturating_sub(-delta as u32)
1974 }
1975 Ordering::Equal => {}
1976 }
1977 }
1978 (size, request.ignore_empty_lines)
1979 });
1980 }
1981 }
1982 }
1983
1984 yield_now().await;
1985 }
1986 }
1987
1988 indent_sizes
1989 .into_iter()
1990 .filter_map(|(row, (indent, ignore_empty_lines))| {
1991 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1992 None
1993 } else {
1994 Some((row, indent))
1995 }
1996 })
1997 .collect()
1998 })
1999 }
2000
2001 fn apply_autoindents(
2002 &mut self,
2003 indent_sizes: BTreeMap<u32, IndentSize>,
2004 cx: &mut Context<Self>,
2005 ) {
2006 self.autoindent_requests.clear();
2007 for tx in self.wait_for_autoindent_txs.drain(..) {
2008 tx.send(()).ok();
2009 }
2010
2011 let edits: Vec<_> = indent_sizes
2012 .into_iter()
2013 .filter_map(|(row, indent_size)| {
2014 let current_size = indent_size_for_line(self, row);
2015 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2016 })
2017 .collect();
2018
2019 let preserve_preview = self.preserve_preview();
2020 self.edit(edits, None, cx);
2021 if preserve_preview {
2022 self.refresh_preview();
2023 }
2024 }
2025
2026 /// Create a minimal edit that will cause the given row to be indented
2027 /// with the given size. After applying this edit, the length of the line
2028 /// will always be at least `new_size.len`.
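///
/// A minimal sketch (not compiled as a doctest), assuming space-based indents
/// constructed with `IndentSize::spaces` as elsewhere in this module:
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces at the
/// // start of that row and leaves the rest of the line untouched.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```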
2029 pub fn edit_for_indent_size_adjustment(
2030 row: u32,
2031 current_size: IndentSize,
2032 new_size: IndentSize,
2033 ) -> Option<(Range<Point>, String)> {
2034 if new_size.kind == current_size.kind {
2035 match new_size.len.cmp(&current_size.len) {
2036 Ordering::Greater => {
2037 let point = Point::new(row, 0);
2038 Some((
2039 point..point,
2040 iter::repeat(new_size.char())
2041 .take((new_size.len - current_size.len) as usize)
2042 .collect::<String>(),
2043 ))
2044 }
2045
2046 Ordering::Less => Some((
2047 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2048 String::new(),
2049 )),
2050
2051 Ordering::Equal => None,
2052 }
2053 } else {
2054 Some((
2055 Point::new(row, 0)..Point::new(row, current_size.len),
2056 iter::repeat(new_size.char())
2057 .take(new_size.len as usize)
2058 .collect::<String>(),
2059 ))
2060 }
2061 }
2062
2063 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2064 /// and the given new text.
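///
/// A rough sketch (not a doctest) of pairing this with [`Buffer::apply_diff`],
/// assuming a hypothetical `new_contents: String` and `cx: &mut Context<Self>`
/// inside a method on `Buffer`:
///
/// ```ignore
/// let diff = self.diff(new_contents, cx);
/// cx.spawn(async move |this, cx| {
///     // Wait for the background diff, then apply it back to the buffer.
///     let diff = diff.await;
///     this.update(cx, |buffer, cx| {
///         buffer.apply_diff(diff, cx);
///     })
///     .ok();
/// })
/// .detach();
/// ```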
2065 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2066 let old_text = self.as_rope().clone();
2067 let base_version = self.version();
2068 cx.background_executor()
2069 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2070 let old_text = old_text.to_string();
2071 let line_ending = LineEnding::detect(&new_text);
2072 LineEnding::normalize(&mut new_text);
2073 let edits = text_diff(&old_text, &new_text);
2074 Diff {
2075 base_version,
2076 line_ending,
2077 edits,
2078 }
2079 })
2080 }
2081
2082 /// Spawns a background task that searches the buffer for any whitespace
2083 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2084 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2085 let old_text = self.as_rope().clone();
2086 let line_ending = self.line_ending();
2087 let base_version = self.version();
2088 cx.background_spawn(async move {
2089 let ranges = trailing_whitespace_ranges(&old_text);
2090 let empty = Arc::<str>::from("");
2091 Diff {
2092 base_version,
2093 line_ending,
2094 edits: ranges
2095 .into_iter()
2096 .map(|range| (range, empty.clone()))
2097 .collect(),
2098 }
2099 })
2100 }
2101
2102 /// Ensures that the buffer ends with a single newline character, and
2103 /// no other whitespace. Skips if the buffer is empty.
2104 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2105 let len = self.len();
2106 if len == 0 {
2107 return;
2108 }
2109 let mut offset = len;
2110 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2111 let non_whitespace_len = chunk
2112 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2113 .len();
2114 offset -= chunk.len();
2115 offset += non_whitespace_len;
2116 if non_whitespace_len != 0 {
2117 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2118 return;
2119 }
2120 break;
2121 }
2122 }
2123 self.edit([(offset..len, "\n")], None, cx);
2124 }
2125
2126 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2127 /// calculated, then adjust the diff to account for those changes, and discard any
2128 /// parts of the diff that conflict with those changes.
2129 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2130 let snapshot = self.snapshot();
2131 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2132 let mut delta = 0;
2133 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2134 while let Some(edit_since) = edits_since.peek() {
2135 // If the edit occurs after a diff hunk, then it does not
2136 // affect that hunk.
2137 if edit_since.old.start > range.end {
2138 break;
2139 }
2140 // If the edit precedes the diff hunk, then adjust the hunk
2141 // to reflect the edit.
2142 else if edit_since.old.end < range.start {
2143 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2144 edits_since.next();
2145 }
2146 // If the edit intersects a diff hunk, then discard that hunk.
2147 else {
2148 return None;
2149 }
2150 }
2151
2152 let start = (range.start as i64 + delta) as usize;
2153 let end = (range.end as i64 + delta) as usize;
2154 Some((start..end, new_text))
2155 });
2156
2157 self.start_transaction();
2158 self.text.set_line_ending(diff.line_ending);
2159 self.edit(adjusted_edits, None, cx);
2160 self.end_transaction(cx)
2161 }
2162
2163 pub fn has_unsaved_edits(&self) -> bool {
2164 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2165
2166 if last_version == self.version {
2167 self.has_unsaved_edits
2168 .set((last_version, has_unsaved_edits));
2169 return has_unsaved_edits;
2170 }
2171
2172 let has_edits = self.has_edits_since(&self.saved_version);
2173 self.has_unsaved_edits
2174 .set((self.version.clone(), has_edits));
2175 has_edits
2176 }
2177
2178 /// Checks if the buffer has unsaved changes.
2179 pub fn is_dirty(&self) -> bool {
2180 if self.capability == Capability::ReadOnly {
2181 return false;
2182 }
2183 if self.has_conflict {
2184 return true;
2185 }
2186 match self.file.as_ref().map(|f| f.disk_state()) {
2187 Some(DiskState::New) | Some(DiskState::Deleted) => {
2188 !self.is_empty() && self.has_unsaved_edits()
2189 }
2190 _ => self.has_unsaved_edits(),
2191 }
2192 }
2193
2194 /// Marks the buffer as having a conflict regardless of current buffer state.
2195 pub fn set_conflict(&mut self) {
2196 self.has_conflict = true;
2197 }
2198
2199 /// Checks if the buffer and its file have both changed since the buffer
2200 /// was last saved or reloaded.
2201 pub fn has_conflict(&self) -> bool {
2202 if self.has_conflict {
2203 return true;
2204 }
2205 let Some(file) = self.file.as_ref() else {
2206 return false;
2207 };
2208 match file.disk_state() {
2209 DiskState::New => false,
2210 DiskState::Present { mtime } => match self.saved_mtime {
2211 Some(saved_mtime) => {
2212 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2213 }
2214 None => true,
2215 },
2216 DiskState::Deleted => false,
2217 }
2218 }
2219
2220 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2221 pub fn subscribe(&mut self) -> Subscription<usize> {
2222 self.text.subscribe()
2223 }
2224
2225 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2226 ///
2227 /// This allows downstream code to check if the buffer's text has changed without
2228 /// waiting for an effect cycle, which would be required if using events.
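///
/// A small sketch (not a doctest), assuming a `buffer: &mut Buffer` and
/// `cx: &mut Context<Buffer>` (e.g. inside an `update` closure):
///
/// ```ignore
/// let change_bit = std::rc::Rc::new(std::cell::Cell::new(false));
/// buffer.record_changes(std::rc::Rc::downgrade(&change_bit));
/// buffer.edit([(0..0, "x")], None, cx);
/// // The bit flips as soon as the buffer's text changes.
/// assert!(change_bit.get());
/// ```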
2229 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2230 if let Err(ix) = self
2231 .change_bits
2232 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2233 {
2234 self.change_bits.insert(ix, bit);
2235 }
2236 }
2237
2238 /// Set the change bit for all "listeners".
2239 fn was_changed(&mut self) {
2240 self.change_bits.retain(|change_bit| {
2241 change_bit
2242 .upgrade()
2243 .inspect(|bit| {
2244 _ = bit.replace(true);
2245 })
2246 .is_some()
2247 });
2248 }
2249
2250 /// Starts a transaction, if one is not already in-progress. When undoing or
2251 /// redoing edits, all of the edits performed within a transaction are undone
2252 /// or redone together.
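///
/// A short sketch (not a doctest), assuming a `buffer: &mut Buffer` and
/// `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn ")], None, cx);
/// buffer.edit([(3..3, "main")], None, cx);
/// let _transaction_id = buffer.end_transaction(cx);
/// // A single undo now reverts both edits together.
/// buffer.undo(cx);
/// ```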
2253 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2254 self.start_transaction_at(Instant::now())
2255 }
2256
2257 /// Starts a transaction, providing the current time. Subsequent transactions
2258 /// that occur within a short period of time will be grouped together. This
2259 /// is controlled by the buffer's undo grouping duration.
2260 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2261 self.transaction_depth += 1;
2262 if self.was_dirty_before_starting_transaction.is_none() {
2263 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2264 }
2265 self.text.start_transaction_at(now)
2266 }
2267
2268 /// Terminates the current transaction, if this is the outermost transaction.
2269 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2270 self.end_transaction_at(Instant::now(), cx)
2271 }
2272
2273 /// Terminates the current transaction, providing the current time. Subsequent transactions
2274 /// that occur within a short period of time will be grouped together. This
2275 /// is controlled by the buffer's undo grouping duration.
2276 pub fn end_transaction_at(
2277 &mut self,
2278 now: Instant,
2279 cx: &mut Context<Self>,
2280 ) -> Option<TransactionId> {
2281 assert!(self.transaction_depth > 0);
2282 self.transaction_depth -= 1;
2283 let was_dirty = if self.transaction_depth == 0 {
2284 self.was_dirty_before_starting_transaction.take().unwrap()
2285 } else {
2286 false
2287 };
2288 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2289 self.did_edit(&start_version, was_dirty, cx);
2290 Some(transaction_id)
2291 } else {
2292 None
2293 }
2294 }
2295
2296 /// Manually add a transaction to the buffer's undo history.
2297 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2298 self.text.push_transaction(transaction, now);
2299 }
2300
2301 /// Differs from `push_transaction` in that it does not clear the redo
2302 /// stack. Intended to be used to create a parent transaction to merge
2303 /// potential child transactions into.
2304 ///
2305 /// The caller is responsible for removing it from the undo history using
2306 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2307 /// are merged into this transaction, the caller is responsible for ensuring
2308 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2309 /// cleared is to create transactions with the usual `start_transaction` and
2310 /// `end_transaction` methods and to merge the resulting transactions into
2311 /// the transaction created by this method.
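///
/// A sketch of that pattern (not a doctest), assuming a `buffer: &mut Buffer`
/// and `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// buffer.start_transaction();
/// buffer.edit([(0..0, "child edit")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     // Nothing was merged, so drop the empty parent transaction.
///     buffer.forget_transaction(parent);
/// }
/// ```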
2312 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2313 self.text.push_empty_transaction(now)
2314 }
2315
2316 /// Prevent the last transaction from being grouped with any subsequent transactions,
2317 /// even if they occur within the buffer's undo grouping duration.
2318 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2319 self.text.finalize_last_transaction()
2320 }
2321
2322 /// Manually group all changes since a given transaction.
2323 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2324 self.text.group_until_transaction(transaction_id);
2325 }
2326
2327 /// Manually remove a transaction from the buffer's undo history
2328 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2329 self.text.forget_transaction(transaction_id)
2330 }
2331
2332 /// Retrieve a transaction from the buffer's undo history
2333 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2334 self.text.get_transaction(transaction_id)
2335 }
2336
2337 /// Manually merge two transactions in the buffer's undo history.
2338 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2339 self.text.merge_transactions(transaction, destination);
2340 }
2341
2342 /// Waits for the buffer to receive operations with the given timestamps.
2343 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2344 &mut self,
2345 edit_ids: It,
2346 ) -> impl Future<Output = Result<()>> + use<It> {
2347 self.text.wait_for_edits(edit_ids)
2348 }
2349
2350 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2351 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2352 &mut self,
2353 anchors: It,
2354 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2355 self.text.wait_for_anchors(anchors)
2356 }
2357
2358 /// Waits for the buffer to receive operations up to the given version.
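///
/// For example (a sketch, not a doctest), with a hypothetical
/// `remote_version: clock::Global` received from another replica and awaited
/// in an async context:
///
/// ```ignore
/// // The returned future does not borrow the buffer, so it can be awaited later.
/// let wait = buffer.wait_for_version(remote_version);
/// wait.await?;
/// ```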
2359 pub fn wait_for_version(
2360 &mut self,
2361 version: clock::Global,
2362 ) -> impl Future<Output = Result<()>> + use<> {
2363 self.text.wait_for_version(version)
2364 }
2365
2366 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2367 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2368 pub fn give_up_waiting(&mut self) {
2369 self.text.give_up_waiting();
2370 }
2371
2372 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2373 let mut rx = None;
2374 if !self.autoindent_requests.is_empty() {
2375 let channel = oneshot::channel();
2376 self.wait_for_autoindent_txs.push(channel.0);
2377 rx = Some(channel.1);
2378 }
2379 rx
2380 }
2381
2382 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2383 pub fn set_active_selections(
2384 &mut self,
2385 selections: Arc<[Selection<Anchor>]>,
2386 line_mode: bool,
2387 cursor_shape: CursorShape,
2388 cx: &mut Context<Self>,
2389 ) {
2390 let lamport_timestamp = self.text.lamport_clock.tick();
2391 self.remote_selections.insert(
2392 self.text.replica_id(),
2393 SelectionSet {
2394 selections: selections.clone(),
2395 lamport_timestamp,
2396 line_mode,
2397 cursor_shape,
2398 },
2399 );
2400 self.send_operation(
2401 Operation::UpdateSelections {
2402 selections,
2403 line_mode,
2404 lamport_timestamp,
2405 cursor_shape,
2406 },
2407 true,
2408 cx,
2409 );
2410 self.non_text_state_update_count += 1;
2411 cx.notify();
2412 }
2413
2414 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2415 /// this replica.
2416 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2417 if self
2418 .remote_selections
2419 .get(&self.text.replica_id())
2420 .is_none_or(|set| !set.selections.is_empty())
2421 {
2422 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2423 }
2424 }
2425
2426 pub fn set_agent_selections(
2427 &mut self,
2428 selections: Arc<[Selection<Anchor>]>,
2429 line_mode: bool,
2430 cursor_shape: CursorShape,
2431 cx: &mut Context<Self>,
2432 ) {
2433 let lamport_timestamp = self.text.lamport_clock.tick();
2434 self.remote_selections.insert(
2435 ReplicaId::AGENT,
2436 SelectionSet {
2437 selections,
2438 lamport_timestamp,
2439 line_mode,
2440 cursor_shape,
2441 },
2442 );
2443 self.non_text_state_update_count += 1;
2444 cx.notify();
2445 }
2446
2447 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2448 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2449 }
2450
2451 /// Replaces the buffer's entire text.
2452 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2453 where
2454 T: Into<Arc<str>>,
2455 {
2456 self.autoindent_requests.clear();
2457 self.edit([(0..self.len(), text)], None, cx)
2458 }
2459
2460 /// Appends the given text to the end of the buffer.
2461 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2462 where
2463 T: Into<Arc<str>>,
2464 {
2465 self.edit([(self.len()..self.len(), text)], None, cx)
2466 }
2467
2468 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2469 /// delete, and a string of text to insert at that location.
2470 ///
2471 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2472 /// request for the edited ranges, which will be processed when the buffer finishes
2473 /// parsing.
2474 ///
2475 /// Parsing takes place at the end of a transaction, and may compute synchronously
2476 /// or asynchronously, depending on the changes.
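///
/// A minimal sketch (not a doctest), assuming a `buffer: &mut Buffer` and
/// `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// // Replace the first three bytes and insert a new, auto-indented line
/// // at offset 10, in a single call.
/// buffer.edit(
///     [(0..3, "let"), (10..10, "\nvalue")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```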
2477 pub fn edit<I, S, T>(
2478 &mut self,
2479 edits_iter: I,
2480 autoindent_mode: Option<AutoindentMode>,
2481 cx: &mut Context<Self>,
2482 ) -> Option<clock::Lamport>
2483 where
2484 I: IntoIterator<Item = (Range<S>, T)>,
2485 S: ToOffset,
2486 T: Into<Arc<str>>,
2487 {
2488 // Skip invalid edits and coalesce contiguous ones.
2489 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2490
2491 for (range, new_text) in edits_iter {
2492 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2493
2494 if range.start > range.end {
2495 mem::swap(&mut range.start, &mut range.end);
2496 }
2497 let new_text = new_text.into();
2498 if !new_text.is_empty() || !range.is_empty() {
2499 if let Some((prev_range, prev_text)) = edits.last_mut()
2500 && prev_range.end >= range.start
2501 {
2502 prev_range.end = cmp::max(prev_range.end, range.end);
2503 *prev_text = format!("{prev_text}{new_text}").into();
2504 } else {
2505 edits.push((range, new_text));
2506 }
2507 }
2508 }
2509 if edits.is_empty() {
2510 return None;
2511 }
2512
2513 self.start_transaction();
2514 self.pending_autoindent.take();
2515 let autoindent_request = autoindent_mode
2516 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2517
2518 let edit_operation = self.text.edit(edits.iter().cloned());
2519 let edit_id = edit_operation.timestamp();
2520
2521 if let Some((before_edit, mode)) = autoindent_request {
2522 let mut delta = 0isize;
2523 let mut previous_setting = None;
2524 let entries: Vec<_> = edits
2525 .into_iter()
2526 .enumerate()
2527 .zip(&edit_operation.as_edit().unwrap().new_text)
2528 .filter(|((_, (range, _)), _)| {
2529 let language = before_edit.language_at(range.start);
2530 let language_id = language.map(|l| l.id());
2531 if let Some((cached_language_id, auto_indent)) = previous_setting
2532 && cached_language_id == language_id
2533 {
2534 auto_indent
2535 } else {
2536 // The auto-indent setting is not present in editorconfigs, hence
2537 // we can avoid passing the file here.
2538 let auto_indent =
2539 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2540 previous_setting = Some((language_id, auto_indent));
2541 auto_indent
2542 }
2543 })
2544 .map(|((ix, (range, _)), new_text)| {
2545 let new_text_length = new_text.len();
2546 let old_start = range.start.to_point(&before_edit);
2547 let new_start = (delta + range.start as isize) as usize;
2548 let range_len = range.end - range.start;
2549 delta += new_text_length as isize - range_len as isize;
2550
2551 // Decide what range of the insertion to auto-indent, and whether
2552 // the first line of the insertion should be considered a newly-inserted line
2553 // or an edit to an existing line.
2554 let mut range_of_insertion_to_indent = 0..new_text_length;
2555 let mut first_line_is_new = true;
2556
2557 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2558 let old_line_end = before_edit.line_len(old_start.row);
2559
2560 if old_start.column > old_line_start {
2561 first_line_is_new = false;
2562 }
2563
2564 if !new_text.contains('\n')
2565 && (old_start.column + (range_len as u32) < old_line_end
2566 || old_line_end == old_line_start)
2567 {
2568 first_line_is_new = false;
2569 }
2570
2571 // When inserting text starting with a newline, avoid auto-indenting the
2572 // previous line.
2573 if new_text.starts_with('\n') {
2574 range_of_insertion_to_indent.start += 1;
2575 first_line_is_new = true;
2576 }
2577
2578 let mut original_indent_column = None;
2579 if let AutoindentMode::Block {
2580 original_indent_columns,
2581 } = &mode
2582 {
2583 original_indent_column = Some(if new_text.starts_with('\n') {
2584 indent_size_for_text(
2585 new_text[range_of_insertion_to_indent.clone()].chars(),
2586 )
2587 .len
2588 } else {
2589 original_indent_columns
2590 .get(ix)
2591 .copied()
2592 .flatten()
2593 .unwrap_or_else(|| {
2594 indent_size_for_text(
2595 new_text[range_of_insertion_to_indent.clone()].chars(),
2596 )
2597 .len
2598 })
2599 });
2600
2601 // Avoid auto-indenting the line after the edit.
2602 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2603 range_of_insertion_to_indent.end -= 1;
2604 }
2605 }
2606
2607 AutoindentRequestEntry {
2608 first_line_is_new,
2609 original_indent_column,
2610 indent_size: before_edit.language_indent_size_at(range.start, cx),
2611 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2612 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2613 }
2614 })
2615 .collect();
2616
2617 if !entries.is_empty() {
2618 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2619 before_edit,
2620 entries,
2621 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2622 ignore_empty_lines: false,
2623 }));
2624 }
2625 }
2626
2627 self.end_transaction(cx);
2628 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2629 Some(edit_id)
2630 }
2631
2632 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2633 self.was_changed();
2634
2635 if self.edits_since::<usize>(old_version).next().is_none() {
2636 return;
2637 }
2638
2639 self.reparse(cx, true);
2640 cx.emit(BufferEvent::Edited);
2641 if was_dirty != self.is_dirty() {
2642 cx.emit(BufferEvent::DirtyChanged);
2643 }
2644 cx.notify();
2645 }
2646
2647 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2648 where
2649 I: IntoIterator<Item = Range<T>>,
2650 T: ToOffset + Copy,
2651 {
2652 let before_edit = self.snapshot();
2653 let entries = ranges
2654 .into_iter()
2655 .map(|range| AutoindentRequestEntry {
2656 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2657 first_line_is_new: true,
2658 indent_size: before_edit.language_indent_size_at(range.start, cx),
2659 original_indent_column: None,
2660 })
2661 .collect();
2662 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2663 before_edit,
2664 entries,
2665 is_block_mode: false,
2666 ignore_empty_lines: true,
2667 }));
2668 self.request_autoindent(cx);
2669 }
2670
2671 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2672 // You can also request the insertion of empty lines above and below the line starting at the returned point.
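// A rough sketch (assuming `cx: &mut Context<Self>`):
//
//     let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
//     // `start` is the beginning of the newly inserted blank line.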
2673 pub fn insert_empty_line(
2674 &mut self,
2675 position: impl ToPoint,
2676 space_above: bool,
2677 space_below: bool,
2678 cx: &mut Context<Self>,
2679 ) -> Point {
2680 let mut position = position.to_point(self);
2681
2682 self.start_transaction();
2683
2684 self.edit(
2685 [(position..position, "\n")],
2686 Some(AutoindentMode::EachLine),
2687 cx,
2688 );
2689
2690 if position.column > 0 {
2691 position += Point::new(1, 0);
2692 }
2693
2694 if !self.is_line_blank(position.row) {
2695 self.edit(
2696 [(position..position, "\n")],
2697 Some(AutoindentMode::EachLine),
2698 cx,
2699 );
2700 }
2701
2702 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2703 self.edit(
2704 [(position..position, "\n")],
2705 Some(AutoindentMode::EachLine),
2706 cx,
2707 );
2708 position.row += 1;
2709 }
2710
2711 if space_below
2712 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2713 {
2714 self.edit(
2715 [(position..position, "\n")],
2716 Some(AutoindentMode::EachLine),
2717 cx,
2718 );
2719 }
2720
2721 self.end_transaction(cx);
2722
2723 position
2724 }
2725
2726 /// Applies the given remote operations to the buffer.
2727 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2728 self.pending_autoindent.take();
2729 let was_dirty = self.is_dirty();
2730 let old_version = self.version.clone();
2731 let mut deferred_ops = Vec::new();
2732 let buffer_ops = ops
2733 .into_iter()
2734 .filter_map(|op| match op {
2735 Operation::Buffer(op) => Some(op),
2736 _ => {
2737 if self.can_apply_op(&op) {
2738 self.apply_op(op, cx);
2739 } else {
2740 deferred_ops.push(op);
2741 }
2742 None
2743 }
2744 })
2745 .collect::<Vec<_>>();
2746 for operation in buffer_ops.iter() {
2747 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2748 }
2749 self.text.apply_ops(buffer_ops);
2750 self.deferred_ops.insert(deferred_ops);
2751 self.flush_deferred_ops(cx);
2752 self.did_edit(&old_version, was_dirty, cx);
2753 // Notify independently of whether the buffer was edited as the operations could include a
2754 // selection update.
2755 cx.notify();
2756 }
2757
2758 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2759 let mut deferred_ops = Vec::new();
2760 for op in self.deferred_ops.drain().iter().cloned() {
2761 if self.can_apply_op(&op) {
2762 self.apply_op(op, cx);
2763 } else {
2764 deferred_ops.push(op);
2765 }
2766 }
2767 self.deferred_ops.insert(deferred_ops);
2768 }
2769
2770 pub fn has_deferred_ops(&self) -> bool {
2771 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2772 }
2773
2774 fn can_apply_op(&self, operation: &Operation) -> bool {
2775 match operation {
2776 Operation::Buffer(_) => {
2777 unreachable!("buffer operations should never be applied at this layer")
2778 }
2779 Operation::UpdateDiagnostics {
2780 diagnostics: diagnostic_set,
2781 ..
2782 } => diagnostic_set.iter().all(|diagnostic| {
2783 self.text.can_resolve(&diagnostic.range.start)
2784 && self.text.can_resolve(&diagnostic.range.end)
2785 }),
2786 Operation::UpdateSelections { selections, .. } => selections
2787 .iter()
2788 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2789 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2790 }
2791 }
2792
2793 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2794 match operation {
2795 Operation::Buffer(_) => {
2796 unreachable!("buffer operations should never be applied at this layer")
2797 }
2798 Operation::UpdateDiagnostics {
2799 server_id,
2800 diagnostics: diagnostic_set,
2801 lamport_timestamp,
2802 } => {
2803 let snapshot = self.snapshot();
2804 self.apply_diagnostic_update(
2805 server_id,
2806 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2807 lamport_timestamp,
2808 cx,
2809 );
2810 }
2811 Operation::UpdateSelections {
2812 selections,
2813 lamport_timestamp,
2814 line_mode,
2815 cursor_shape,
2816 } => {
2817 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2818 && set.lamport_timestamp > lamport_timestamp
2819 {
2820 return;
2821 }
2822
2823 self.remote_selections.insert(
2824 lamport_timestamp.replica_id,
2825 SelectionSet {
2826 selections,
2827 lamport_timestamp,
2828 line_mode,
2829 cursor_shape,
2830 },
2831 );
2832 self.text.lamport_clock.observe(lamport_timestamp);
2833 self.non_text_state_update_count += 1;
2834 }
2835 Operation::UpdateCompletionTriggers {
2836 triggers,
2837 lamport_timestamp,
2838 server_id,
2839 } => {
2840 if triggers.is_empty() {
2841 self.completion_triggers_per_language_server
2842 .remove(&server_id);
2843 self.completion_triggers = self
2844 .completion_triggers_per_language_server
2845 .values()
2846 .flat_map(|triggers| triggers.iter().cloned())
2847 .collect();
2848 } else {
2849 self.completion_triggers_per_language_server
2850 .insert(server_id, triggers.iter().cloned().collect());
2851 self.completion_triggers.extend(triggers);
2852 }
2853 self.text.lamport_clock.observe(lamport_timestamp);
2854 }
2855 Operation::UpdateLineEnding {
2856 line_ending,
2857 lamport_timestamp,
2858 } => {
2859 self.text.set_line_ending(line_ending);
2860 self.text.lamport_clock.observe(lamport_timestamp);
2861 }
2862 }
2863 }
2864
2865 fn apply_diagnostic_update(
2866 &mut self,
2867 server_id: LanguageServerId,
2868 diagnostics: DiagnosticSet,
2869 lamport_timestamp: clock::Lamport,
2870 cx: &mut Context<Self>,
2871 ) {
2872 if lamport_timestamp > self.diagnostics_timestamp {
2873 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2874 if diagnostics.is_empty() {
2875 if let Ok(ix) = ix {
2876 self.diagnostics.remove(ix);
2877 }
2878 } else {
2879 match ix {
2880 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2881 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2882 };
2883 }
2884 self.diagnostics_timestamp = lamport_timestamp;
2885 self.non_text_state_update_count += 1;
2886 self.text.lamport_clock.observe(lamport_timestamp);
2887 cx.notify();
2888 cx.emit(BufferEvent::DiagnosticsUpdated);
2889 }
2890 }
2891
2892 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2893 self.was_changed();
2894 cx.emit(BufferEvent::Operation {
2895 operation,
2896 is_local,
2897 });
2898 }
2899
2900 /// Removes the selections for a given peer.
2901 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2902 self.remote_selections.remove(&replica_id);
2903 cx.notify();
2904 }
2905
2906 /// Undoes the most recent transaction.
2907 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2908 let was_dirty = self.is_dirty();
2909 let old_version = self.version.clone();
2910
2911 if let Some((transaction_id, operation)) = self.text.undo() {
2912 self.send_operation(Operation::Buffer(operation), true, cx);
2913 self.did_edit(&old_version, was_dirty, cx);
2914 Some(transaction_id)
2915 } else {
2916 None
2917 }
2918 }
2919
2920 /// Manually undoes a specific transaction in the buffer's undo history.
2921 pub fn undo_transaction(
2922 &mut self,
2923 transaction_id: TransactionId,
2924 cx: &mut Context<Self>,
2925 ) -> bool {
2926 let was_dirty = self.is_dirty();
2927 let old_version = self.version.clone();
2928 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2929 self.send_operation(Operation::Buffer(operation), true, cx);
2930 self.did_edit(&old_version, was_dirty, cx);
2931 true
2932 } else {
2933 false
2934 }
2935 }
2936
2937 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2938 pub fn undo_to_transaction(
2939 &mut self,
2940 transaction_id: TransactionId,
2941 cx: &mut Context<Self>,
2942 ) -> bool {
2943 let was_dirty = self.is_dirty();
2944 let old_version = self.version.clone();
2945
2946 let operations = self.text.undo_to_transaction(transaction_id);
2947 let undone = !operations.is_empty();
2948 for operation in operations {
2949 self.send_operation(Operation::Buffer(operation), true, cx);
2950 }
2951 if undone {
2952 self.did_edit(&old_version, was_dirty, cx)
2953 }
2954 undone
2955 }
2956
2957 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2958 let was_dirty = self.is_dirty();
2959 let operation = self.text.undo_operations(counts);
2960 let old_version = self.version.clone();
2961 self.send_operation(Operation::Buffer(operation), true, cx);
2962 self.did_edit(&old_version, was_dirty, cx);
2963 }
2964
2965 /// Redoes the most recently undone transaction.
2966 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2967 let was_dirty = self.is_dirty();
2968 let old_version = self.version.clone();
2969
2970 if let Some((transaction_id, operation)) = self.text.redo() {
2971 self.send_operation(Operation::Buffer(operation), true, cx);
2972 self.did_edit(&old_version, was_dirty, cx);
2973 Some(transaction_id)
2974 } else {
2975 None
2976 }
2977 }
2978
2979 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2980 pub fn redo_to_transaction(
2981 &mut self,
2982 transaction_id: TransactionId,
2983 cx: &mut Context<Self>,
2984 ) -> bool {
2985 let was_dirty = self.is_dirty();
2986 let old_version = self.version.clone();
2987
2988 let operations = self.text.redo_to_transaction(transaction_id);
2989 let redone = !operations.is_empty();
2990 for operation in operations {
2991 self.send_operation(Operation::Buffer(operation), true, cx);
2992 }
2993 if redone {
2994 self.did_edit(&old_version, was_dirty, cx)
2995 }
2996 redone
2997 }
2998
2999 /// Override current completion triggers with the user-provided completion triggers.
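///
/// A small sketch (not a doctest), assuming a known `server_id: LanguageServerId`
/// and `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// buffer.set_completion_triggers(
///     server_id,
///     BTreeSet::from([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// ```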
3000 pub fn set_completion_triggers(
3001 &mut self,
3002 server_id: LanguageServerId,
3003 triggers: BTreeSet<String>,
3004 cx: &mut Context<Self>,
3005 ) {
3006 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3007 if triggers.is_empty() {
3008 self.completion_triggers_per_language_server
3009 .remove(&server_id);
3010 self.completion_triggers = self
3011 .completion_triggers_per_language_server
3012 .values()
3013 .flat_map(|triggers| triggers.iter().cloned())
3014 .collect();
3015 } else {
3016 self.completion_triggers_per_language_server
3017 .insert(server_id, triggers.clone());
3018 self.completion_triggers.extend(triggers.iter().cloned());
3019 }
3020 self.send_operation(
3021 Operation::UpdateCompletionTriggers {
3022 triggers: triggers.into_iter().collect(),
3023 lamport_timestamp: self.completion_triggers_timestamp,
3024 server_id,
3025 },
3026 true,
3027 cx,
3028 );
3029 cx.notify();
3030 }
3031
3032 /// Returns a list of strings which trigger a completion menu for this language.
3033 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3034 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3035 &self.completion_triggers
3036 }
3037
3038 /// Call this directly after performing edits to prevent the preview tab
3039 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3040 /// to return false until there are additional edits.
3041 pub fn refresh_preview(&mut self) {
3042 self.preview_version = self.version.clone();
3043 }
3044
3045 /// Whether we should preserve the preview status of a tab containing this buffer.
3046 pub fn preserve_preview(&self) -> bool {
3047 !self.has_edits_since(&self.preview_version)
3048 }
3049}
3050
3051#[doc(hidden)]
3052#[cfg(any(test, feature = "test-support"))]
3053impl Buffer {
3054 pub fn edit_via_marked_text(
3055 &mut self,
3056 marked_string: &str,
3057 autoindent_mode: Option<AutoindentMode>,
3058 cx: &mut Context<Self>,
3059 ) {
3060 let edits = self.edits_for_marked_text(marked_string);
3061 self.edit(edits, autoindent_mode, cx);
3062 }
3063
3064 pub fn set_group_interval(&mut self, group_interval: Duration) {
3065 self.text.set_group_interval(group_interval);
3066 }
3067
3068 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3069 where
3070 T: rand::Rng,
3071 {
3072 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3073 let mut last_end = None;
3074 for _ in 0..old_range_count {
3075 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3076 break;
3077 }
3078
3079 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3080 let mut range = self.random_byte_range(new_start, rng);
3081 if rng.random_bool(0.2) {
3082 mem::swap(&mut range.start, &mut range.end);
3083 }
3084 last_end = Some(range.end);
3085
3086 let new_text_len = rng.random_range(0..10);
3087 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3088 new_text = new_text.to_uppercase();
3089
3090 edits.push((range, new_text));
3091 }
3092 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3093 self.edit(edits, None, cx);
3094 }
3095
3096 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3097 let was_dirty = self.is_dirty();
3098 let old_version = self.version.clone();
3099
3100 let ops = self.text.randomly_undo_redo(rng);
3101 if !ops.is_empty() {
3102 for op in ops {
3103 self.send_operation(Operation::Buffer(op), true, cx);
3104 self.did_edit(&old_version, was_dirty, cx);
3105 }
3106 }
3107 }
3108}
3109
3110impl EventEmitter<BufferEvent> for Buffer {}
3111
3112impl Deref for Buffer {
3113 type Target = TextBuffer;
3114
3115 fn deref(&self) -> &Self::Target {
3116 &self.text
3117 }
3118}
3119
3120impl BufferSnapshot {
3121 /// Returns [`IndentSize`] for a given line that respects user settings and
3122 /// language preferences.
3123 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3124 indent_size_for_line(self, row)
3125 }
3126
3127 /// Returns [`IndentSize`] for a given position that respects user settings
3128 /// and language preferences.
3129 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3130 let settings = language_settings(
3131 self.language_at(position).map(|l| l.name()),
3132 self.file(),
3133 cx,
3134 );
3135 if settings.hard_tabs {
3136 IndentSize::tab()
3137 } else {
3138 IndentSize::spaces(settings.tab_size.get())
3139 }
3140 }
3141
3142 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3143 /// is passed in as `single_indent_size`.
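///
/// A small sketch (not a doctest):
///
/// ```ignore
/// // Suggested indents for the first five rows, using 4-space indent units.
/// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
/// for (row, size) in indents {
///     println!("row {row}: {} columns", size.len);
/// }
/// ```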
3144 pub fn suggested_indents(
3145 &self,
3146 rows: impl Iterator<Item = u32>,
3147 single_indent_size: IndentSize,
3148 ) -> BTreeMap<u32, IndentSize> {
3149 let mut result = BTreeMap::new();
3150
3151 for row_range in contiguous_ranges(rows, 10) {
3152 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3153 Some(suggestions) => suggestions,
3154 _ => break,
3155 };
3156
3157 for (row, suggestion) in row_range.zip(suggestions) {
3158 let indent_size = if let Some(suggestion) = suggestion {
3159 result
3160 .get(&suggestion.basis_row)
3161 .copied()
3162 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3163 .with_delta(suggestion.delta, single_indent_size)
3164 } else {
3165 self.indent_size_for_line(row)
3166 };
3167
3168 result.insert(row, indent_size);
3169 }
3170 }
3171
3172 result
3173 }
3174
3175 fn suggest_autoindents(
3176 &self,
3177 row_range: Range<u32>,
3178 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3179 let config = &self.language.as_ref()?.config;
3180 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3181
3182 #[derive(Debug, Clone)]
3183 struct StartPosition {
3184 start: Point,
3185 suffix: SharedString,
3186 }
3187
3188 // Find the suggested indentation ranges based on the syntax tree.
3189 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3190 let end = Point::new(row_range.end, 0);
3191 let range = (start..end).to_offset(&self.text);
3192 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3193 Some(&grammar.indents_config.as_ref()?.query)
3194 });
3195 let indent_configs = matches
3196 .grammars()
3197 .iter()
3198 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3199 .collect::<Vec<_>>();
3200
3201 let mut indent_ranges = Vec::<Range<Point>>::new();
3202 let mut start_positions = Vec::<StartPosition>::new();
3203 let mut outdent_positions = Vec::<Point>::new();
3204 while let Some(mat) = matches.peek() {
3205 let mut start: Option<Point> = None;
3206 let mut end: Option<Point> = None;
3207
3208 let config = indent_configs[mat.grammar_index];
3209 for capture in mat.captures {
3210 if capture.index == config.indent_capture_ix {
3211 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3212 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3213 } else if Some(capture.index) == config.start_capture_ix {
3214 start = Some(Point::from_ts_point(capture.node.end_position()));
3215 } else if Some(capture.index) == config.end_capture_ix {
3216 end = Some(Point::from_ts_point(capture.node.start_position()));
3217 } else if Some(capture.index) == config.outdent_capture_ix {
3218 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3219 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3220 start_positions.push(StartPosition {
3221 start: Point::from_ts_point(capture.node.start_position()),
3222 suffix: suffix.clone(),
3223 });
3224 }
3225 }
3226
3227 matches.advance();
3228 if let Some((start, end)) = start.zip(end) {
3229 if start.row == end.row {
3230 continue;
3231 }
3232 let range = start..end;
3233 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3234 Err(ix) => indent_ranges.insert(ix, range),
3235 Ok(ix) => {
3236 let prev_range = &mut indent_ranges[ix];
3237 prev_range.end = prev_range.end.max(range.end);
3238 }
3239 }
3240 }
3241 }
3242
3243 let mut error_ranges = Vec::<Range<Point>>::new();
3244 let mut matches = self
3245 .syntax
3246 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3247 while let Some(mat) = matches.peek() {
3248 let node = mat.captures[0].node;
3249 let start = Point::from_ts_point(node.start_position());
3250 let end = Point::from_ts_point(node.end_position());
3251 let range = start..end;
3252 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3253 Ok(ix) | Err(ix) => ix,
3254 };
3255 let mut end_ix = ix;
3256 while let Some(existing_range) = error_ranges.get(end_ix) {
3257 if existing_range.end < end {
3258 end_ix += 1;
3259 } else {
3260 break;
3261 }
3262 }
3263 error_ranges.splice(ix..end_ix, [range]);
3264 matches.advance();
3265 }
3266
3267 outdent_positions.sort();
3268 for outdent_position in outdent_positions {
3269 // Find the innermost indent range containing this outdent position and
3270 // set its end to the outdent position.
3271 if let Some(range_to_truncate) = indent_ranges
3272 .iter_mut()
3273 .filter(|indent_range| indent_range.contains(&outdent_position))
3274 .next_back()
3275 {
3276 range_to_truncate.end = outdent_position;
3277 }
3278 }
3279
3280 start_positions.sort_by_key(|b| b.start);
3281
3282 // Find the suggested indentation increases and decreases based on regexes.
3283 let mut regex_outdent_map = HashMap::default();
3284 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3285 let mut start_positions_iter = start_positions.iter().peekable();
3286
3287 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3288 self.for_each_line(
3289 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3290 ..Point::new(row_range.end, 0),
3291 |row, line| {
3292 if config
3293 .decrease_indent_pattern
3294 .as_ref()
3295 .is_some_and(|regex| regex.is_match(line))
3296 {
3297 indent_change_rows.push((row, Ordering::Less));
3298 }
3299 if config
3300 .increase_indent_pattern
3301 .as_ref()
3302 .is_some_and(|regex| regex.is_match(line))
3303 {
3304 indent_change_rows.push((row + 1, Ordering::Greater));
3305 }
3306 while let Some(pos) = start_positions_iter.peek() {
3307 if pos.start.row < row {
3308 let pos = start_positions_iter.next().unwrap();
3309 last_seen_suffix
3310 .entry(pos.suffix.to_string())
3311 .or_default()
3312 .push(pos.start);
3313 } else {
3314 break;
3315 }
3316 }
3317 for rule in &config.decrease_indent_patterns {
3318 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3319 let row_start_column = self.indent_size_for_line(row).len;
3320 let basis_row = rule
3321 .valid_after
3322 .iter()
3323 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3324 .flatten()
3325 .filter(|start_point| start_point.column <= row_start_column)
3326 .max_by_key(|start_point| start_point.row);
3327 if let Some(outdent_to_row) = basis_row {
3328 regex_outdent_map.insert(row, outdent_to_row.row);
3329 }
3330 break;
3331 }
3332 }
3333 },
3334 );
3335
3336 let mut indent_changes = indent_change_rows.into_iter().peekable();
3337 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3338 prev_non_blank_row.unwrap_or(0)
3339 } else {
3340 row_range.start.saturating_sub(1)
3341 };
3342
3343 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3344 Some(row_range.map(move |row| {
3345 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3346
3347 let mut indent_from_prev_row = false;
3348 let mut outdent_from_prev_row = false;
3349 let mut outdent_to_row = u32::MAX;
3350 let mut from_regex = false;
3351
3352 while let Some((indent_row, delta)) = indent_changes.peek() {
3353 match indent_row.cmp(&row) {
3354 Ordering::Equal => match delta {
3355 Ordering::Less => {
3356 from_regex = true;
3357 outdent_from_prev_row = true
3358 }
3359 Ordering::Greater => {
3360 indent_from_prev_row = true;
3361 from_regex = true
3362 }
3363 _ => {}
3364 },
3365
3366 Ordering::Greater => break,
3367 Ordering::Less => {}
3368 }
3369
3370 indent_changes.next();
3371 }
3372
3373 for range in &indent_ranges {
3374 if range.start.row >= row {
3375 break;
3376 }
3377 if range.start.row == prev_row && range.end > row_start {
3378 indent_from_prev_row = true;
3379 }
3380 if range.end > prev_row_start && range.end <= row_start {
3381 outdent_to_row = outdent_to_row.min(range.start.row);
3382 }
3383 }
3384
3385 if let Some(basis_row) = regex_outdent_map.get(&row) {
3386 indent_from_prev_row = false;
3387 outdent_to_row = *basis_row;
3388 from_regex = true;
3389 }
3390
3391 let within_error = error_ranges
3392 .iter()
3393 .any(|e| e.start.row < row && e.end > row_start);
3394
3395 let suggestion = if outdent_to_row == prev_row
3396 || (outdent_from_prev_row && indent_from_prev_row)
3397 {
3398 Some(IndentSuggestion {
3399 basis_row: prev_row,
3400 delta: Ordering::Equal,
3401 within_error: within_error && !from_regex,
3402 })
3403 } else if indent_from_prev_row {
3404 Some(IndentSuggestion {
3405 basis_row: prev_row,
3406 delta: Ordering::Greater,
3407 within_error: within_error && !from_regex,
3408 })
3409 } else if outdent_to_row < prev_row {
3410 Some(IndentSuggestion {
3411 basis_row: outdent_to_row,
3412 delta: Ordering::Equal,
3413 within_error: within_error && !from_regex,
3414 })
3415 } else if outdent_from_prev_row {
3416 Some(IndentSuggestion {
3417 basis_row: prev_row,
3418 delta: Ordering::Less,
3419 within_error: within_error && !from_regex,
3420 })
3421 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3422 {
3423 Some(IndentSuggestion {
3424 basis_row: prev_row,
3425 delta: Ordering::Equal,
3426 within_error: within_error && !from_regex,
3427 })
3428 } else {
3429 None
3430 };
3431
3432 prev_row = row;
3433 prev_row_start = row_start;
3434 suggestion
3435 }))
3436 }
3437
3438 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3439 while row > 0 {
3440 row -= 1;
3441 if !self.is_line_blank(row) {
3442 return Some(row);
3443 }
3444 }
3445 None
3446 }
3447
3448 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3449 let captures = self.syntax.captures(range, &self.text, |grammar| {
3450 grammar
3451 .highlights_config
3452 .as_ref()
3453 .map(|config| &config.query)
3454 });
3455 let highlight_maps = captures
3456 .grammars()
3457 .iter()
3458 .map(|grammar| grammar.highlight_map())
3459 .collect();
3460 (captures, highlight_maps)
3461 }
3462
3463 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3464 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3465 /// returned in chunks where each chunk has a single syntax highlighting style and
3466 /// diagnostic status.
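///
/// A short sketch (not a doctest), assuming each chunk exposes its `text` slice:
///
/// ```ignore
/// // Reassembling the chunks yields the buffer's full text.
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     text.push_str(chunk.text);
/// }
/// assert_eq!(text, snapshot.text());
/// ```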
3467 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3468 let range = range.start.to_offset(self)..range.end.to_offset(self);
3469
3470 let mut syntax = None;
3471 if language_aware {
3472 syntax = Some(self.get_highlights(range.clone()));
3473 }
3474 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3475 let diagnostics = language_aware;
3476 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3477 }
3478
3479 pub fn highlighted_text_for_range<T: ToOffset>(
3480 &self,
3481 range: Range<T>,
3482 override_style: Option<HighlightStyle>,
3483 syntax_theme: &SyntaxTheme,
3484 ) -> HighlightedText {
3485 HighlightedText::from_buffer_range(
3486 range,
3487 &self.text,
3488 &self.syntax,
3489 override_style,
3490 syntax_theme,
3491 )
3492 }
3493
3494 /// Invokes the given callback for each line of text in the given range of the buffer.
3495 /// A single reused buffer is passed to the callback, avoiding a new string allocation per line.
3496 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3497 let mut line = String::new();
3498 let mut row = range.start.row;
3499 for chunk in self
3500 .as_rope()
3501 .chunks_in_range(range.to_offset(self))
3502 .chain(["\n"])
3503 {
3504 for (newline_ix, text) in chunk.split('\n').enumerate() {
3505 if newline_ix > 0 {
3506 callback(row, &line);
3507 row += 1;
3508 line.clear();
3509 }
3510 line.push_str(text);
3511 }
3512 }
3513 }
3514
3515 /// Iterates over every [`SyntaxLayer`] in the buffer.
3516 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3517 self.syntax_layers_for_range(0..self.len(), true)
3518 }
3519
3520 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3521 let offset = position.to_offset(self);
3522 self.syntax_layers_for_range(offset..offset, false)
3523 .filter(|l| {
3524 if let Some(ranges) = l.included_sub_ranges {
3525 ranges.iter().any(|range| {
3526 let start = range.start.to_offset(self);
3527 start <= offset && {
3528 let end = range.end.to_offset(self);
3529 offset < end
3530 }
3531 })
3532 } else {
3533 l.node().start_byte() <= offset && l.node().end_byte() > offset
3534 }
3535 })
3536 .last()
3537 }
3538
3539 pub fn syntax_layers_for_range<D: ToOffset>(
3540 &self,
3541 range: Range<D>,
3542 include_hidden: bool,
3543 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3544 self.syntax
3545 .layers_for_range(range, &self.text, include_hidden)
3546 }
3547
3548 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3549 &self,
3550 range: Range<D>,
3551 ) -> Option<SyntaxLayer<'_>> {
3552 let range = range.to_offset(self);
3553 self.syntax
3554 .layers_for_range(range, &self.text, false)
3555 .max_by(|a, b| {
3556 if a.depth != b.depth {
3557 a.depth.cmp(&b.depth)
3558 } else if a.offset.0 != b.offset.0 {
3559 a.offset.0.cmp(&b.offset.0)
3560 } else {
3561 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3562 }
3563 })
3564 }
3565
3566 /// Returns the main [`Language`].
3567 pub fn language(&self) -> Option<&Arc<Language>> {
3568 self.language.as_ref()
3569 }
3570
3571 /// Returns the [`Language`] at the given location.
3572 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3573 self.syntax_layer_at(position)
3574 .map(|info| info.language)
3575 .or(self.language.as_ref())
3576 }
3577
3578 /// Returns the settings for the language at the given location.
3579 pub fn settings_at<'a, D: ToOffset>(
3580 &'a self,
3581 position: D,
3582 cx: &'a App,
3583 ) -> Cow<'a, LanguageSettings> {
3584 language_settings(
3585 self.language_at(position).map(|l| l.name()),
3586 self.file.as_ref(),
3587 cx,
3588 )
3589 }
3590
3591 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3592 CharClassifier::new(self.language_scope_at(point))
3593 }
3594
3595 /// Returns the [`LanguageScope`] at the given location.
3596 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3597 let offset = position.to_offset(self);
3598 let mut scope = None;
3599 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3600
3601 // Use the layer that has the smallest node intersecting the given point.
3602 for layer in self
3603 .syntax
3604 .layers_for_range(offset..offset, &self.text, false)
3605 {
3606 let mut cursor = layer.node().walk();
3607
3608 let mut range = None;
3609 loop {
3610 let child_range = cursor.node().byte_range();
3611 if !child_range.contains(&offset) {
3612 break;
3613 }
3614
3615 range = Some(child_range);
3616 if cursor.goto_first_child_for_byte(offset).is_none() {
3617 break;
3618 }
3619 }
3620
3621 if let Some(range) = range
3622 && smallest_range_and_depth.as_ref().is_none_or(
3623 |(smallest_range, smallest_range_depth)| {
3624 if layer.depth > *smallest_range_depth {
3625 true
3626 } else if layer.depth == *smallest_range_depth {
3627 range.len() < smallest_range.len()
3628 } else {
3629 false
3630 }
3631 },
3632 )
3633 {
3634 smallest_range_and_depth = Some((range, layer.depth));
3635 scope = Some(LanguageScope {
3636 language: layer.language.clone(),
3637 override_id: layer.override_id(offset, &self.text),
3638 });
3639 }
3640 }
3641
3642 scope.or_else(|| {
3643 self.language.clone().map(|language| LanguageScope {
3644 language,
3645 override_id: None,
3646 })
3647 })
3648 }
3649
3650 /// Returns a tuple of the range and character kind of the word
3651 /// surrounding the given position.
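///
/// A minimal sketch (not compiled as a doctest; `snapshot` and `cursor_offset` are
/// assumed to exist):
///
/// ```ignore
/// let (word_range, _kind) = snapshot.surrounding_word(cursor_offset, None);
/// let word: String = snapshot.text_for_range(word_range).collect();
/// ```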
3652 pub fn surrounding_word<T: ToOffset>(
3653 &self,
3654 start: T,
3655 scope_context: Option<CharScopeContext>,
3656 ) -> (Range<usize>, Option<CharKind>) {
3657 let mut start = start.to_offset(self);
3658 let mut end = start;
3659 let mut next_chars = self.chars_at(start).take(128).peekable();
3660 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3661
3662 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3663 let word_kind = cmp::max(
3664 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3665 next_chars.peek().copied().map(|c| classifier.kind(c)),
3666 );
3667
3668 for ch in prev_chars {
3669 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3670 start -= ch.len_utf8();
3671 } else {
3672 break;
3673 }
3674 }
3675
3676 for ch in next_chars {
3677 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3678 end += ch.len_utf8();
3679 } else {
3680 break;
3681 }
3682 }
3683
3684 (start..end, word_kind)
3685 }
3686
3687 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3688 /// range. When `require_larger` is true, the node found must be larger than the query range.
3689 ///
3690 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3691 /// be moved to the root of the tree.
3692 fn goto_node_enclosing_range(
3693 cursor: &mut tree_sitter::TreeCursor,
3694 query_range: &Range<usize>,
3695 require_larger: bool,
3696 ) -> bool {
3697 let mut ascending = false;
3698 loop {
3699 let mut range = cursor.node().byte_range();
3700 if query_range.is_empty() {
3701 // When the query range is empty and the current node starts after it, move to the
3702 // previous sibling to find the containing node.
3703 if range.start > query_range.start {
3704 cursor.goto_previous_sibling();
3705 range = cursor.node().byte_range();
3706 }
3707 } else {
3708 // When the query range is non-empty and the current node ends exactly at the start,
3709 // move to the next sibling to find a node that extends beyond the start.
3710 if range.end == query_range.start {
3711 cursor.goto_next_sibling();
3712 range = cursor.node().byte_range();
3713 }
3714 }
3715
3716 let encloses = range.contains_inclusive(query_range)
3717 && (!require_larger || range.len() > query_range.len());
3718 if !encloses {
3719 ascending = true;
3720 if !cursor.goto_parent() {
3721 return false;
3722 }
3723 continue;
3724 } else if ascending {
3725 return true;
3726 }
3727
3728 // Descend into the current node.
3729 if cursor
3730 .goto_first_child_for_byte(query_range.start)
3731 .is_none()
3732 {
3733 return true;
3734 }
3735 }
3736 }
3737
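/// Returns the smallest syntax node, across all syntax layers, that contains the given
/// range and is larger than it.
///
/// A rough sketch (not compiled as a doctest; `snapshot` and `selection` are assumed,
/// e.g. for an "expand selection"-style feature):
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
///     let expanded = node.byte_range();
/// }
/// ```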
3738 pub fn syntax_ancestor<'a, T: ToOffset>(
3739 &'a self,
3740 range: Range<T>,
3741 ) -> Option<tree_sitter::Node<'a>> {
3742 let range = range.start.to_offset(self)..range.end.to_offset(self);
3743 let mut result: Option<tree_sitter::Node<'a>> = None;
3744 for layer in self
3745 .syntax
3746 .layers_for_range(range.clone(), &self.text, true)
3747 {
3748 let mut cursor = layer.node().walk();
3749
3750 // Find the node that both contains the range and is larger than it.
3751 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3752 continue;
3753 }
3754
3755 let left_node = cursor.node();
3756 let mut layer_result = left_node;
3757
3758 // For an empty range, try to find another node immediately to the right of the range.
3759 if left_node.end_byte() == range.start {
3760 let mut right_node = None;
3761 while !cursor.goto_next_sibling() {
3762 if !cursor.goto_parent() {
3763 break;
3764 }
3765 }
3766
3767 while cursor.node().start_byte() == range.start {
3768 right_node = Some(cursor.node());
3769 if !cursor.goto_first_child() {
3770 break;
3771 }
3772 }
3773
3774 // If there is a candidate node on both sides of the (empty) range, then
3775 // decide between the two by favoring a named node over an anonymous token.
3776 // If both nodes are the same in that regard, favor the right one.
3777 if let Some(right_node) = right_node
3778 && (right_node.is_named() || !left_node.is_named())
3779 {
3780 layer_result = right_node;
3781 }
3782 }
3783
3784 if let Some(previous_result) = &result
3785 && previous_result.byte_range().len() < layer_result.byte_range().len()
3786 {
3787 continue;
3788 }
3789 result = Some(layer_result);
3790 }
3791
3792 result
3793 }
3794
3795 /// Find the previous sibling syntax node at the given range.
3796 ///
3797 /// This function locates the syntax node that precedes the node containing
3798 /// the given range. It searches hierarchically by:
3799 /// 1. Finding the node that contains the given range
3800 /// 2. Looking for the previous sibling at the same tree level
3801 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3802 ///
3803 /// Returns `None` if there is no previous sibling at any ancestor level.
3804 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3805 &'a self,
3806 range: Range<T>,
3807 ) -> Option<tree_sitter::Node<'a>> {
3808 let range = range.start.to_offset(self)..range.end.to_offset(self);
3809 let mut result: Option<tree_sitter::Node<'a>> = None;
3810
3811 for layer in self
3812 .syntax
3813 .layers_for_range(range.clone(), &self.text, true)
3814 {
3815 let mut cursor = layer.node().walk();
3816
3817 // Find the node that contains the range
3818 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3819 continue;
3820 }
3821
3822 // Look for the previous sibling, moving up ancestor levels if needed
3823 loop {
3824 if cursor.goto_previous_sibling() {
3825 let layer_result = cursor.node();
3826
3827 if let Some(previous_result) = &result {
3828 if previous_result.byte_range().end < layer_result.byte_range().end {
3829 continue;
3830 }
3831 }
3832 result = Some(layer_result);
3833 break;
3834 }
3835
3836 // No sibling found at this level, try moving up to parent
3837 if !cursor.goto_parent() {
3838 break;
3839 }
3840 }
3841 }
3842
3843 result
3844 }
3845
3846 /// Find the next sibling syntax node at the given range.
3847 ///
3848 /// This function locates the syntax node that follows the node containing
3849 /// the given range. It searches hierarchically by:
3850 /// 1. Finding the node that contains the given range
3851 /// 2. Looking for the next sibling at the same tree level
3852 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3853 ///
3854 /// Returns `None` if there is no next sibling at any ancestor level.
3855 pub fn syntax_next_sibling<'a, T: ToOffset>(
3856 &'a self,
3857 range: Range<T>,
3858 ) -> Option<tree_sitter::Node<'a>> {
3859 let range = range.start.to_offset(self)..range.end.to_offset(self);
3860 let mut result: Option<tree_sitter::Node<'a>> = None;
3861
3862 for layer in self
3863 .syntax
3864 .layers_for_range(range.clone(), &self.text, true)
3865 {
3866 let mut cursor = layer.node().walk();
3867
3868 // Find the node that contains the range
3869 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3870 continue;
3871 }
3872
3873 // Look for the next sibling, moving up ancestor levels if needed
3874 loop {
3875 if cursor.goto_next_sibling() {
3876 let layer_result = cursor.node();
3877
3878 if let Some(previous_result) = &result {
3879 if previous_result.byte_range().start > layer_result.byte_range().start {
3880 continue;
3881 }
3882 }
3883 result = Some(layer_result);
3884 break;
3885 }
3886
3887 // No sibling found at this level, try moving up to parent
3888 if !cursor.goto_parent() {
3889 break;
3890 }
3891 }
3892 }
3893
3894 result
3895 }
3896
3897 /// Returns the root syntax node within the given row
3898 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3899 let start_offset = position.to_offset(self);
3900
3901 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3902
3903 let layer = self
3904 .syntax
3905 .layers_for_range(start_offset..start_offset, &self.text, true)
3906 .next()?;
3907
3908 let mut cursor = layer.node().walk();
3909
3910 // Descend to the first leaf that touches the start of the range.
3911 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3912 if cursor.node().end_byte() == start_offset {
3913 cursor.goto_next_sibling();
3914 }
3915 }
3916
3917 // Ascend to the root node within the same row.
3918 while cursor.goto_parent() {
3919 if cursor.node().start_position().row != row {
3920 break;
3921 }
3922 }
3923
3924 Some(cursor.node())
3925 }
3926
3927 /// Returns the outline for the buffer.
3928 ///
3929 /// This method allows passing an optional [`SyntaxTheme`] to
3930 /// syntax-highlight the returned symbols.
3931 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3932 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3933 }
3934
3935 /// Returns all the symbols that contain the given position.
3936 ///
3937 /// This method allows passing an optional [`SyntaxTheme`] to
3938 /// syntax-highlight the returned symbols.
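///
/// A rough sketch of building a breadcrumb from the result (not compiled as a doctest;
/// `snapshot` and `cursor_offset` are assumed):
///
/// ```ignore
/// let symbols = snapshot.symbols_containing(cursor_offset, None);
/// let breadcrumb = symbols
///     .iter()
///     .map(|item| item.text.as_str())
///     .collect::<Vec<_>>()
///     .join(" > ");
/// ```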
3939 pub fn symbols_containing<T: ToOffset>(
3940 &self,
3941 position: T,
3942 theme: Option<&SyntaxTheme>,
3943 ) -> Vec<OutlineItem<Anchor>> {
3944 let position = position.to_offset(self);
3945 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3946 let end = self.clip_offset(position + 1, Bias::Right);
3947 let mut items = self.outline_items_containing(start..end, false, theme);
3948 let mut prev_depth = None;
3949 items.retain(|item| {
3950 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3951 prev_depth = Some(item.depth);
3952 result
3953 });
3954 items
3955 }
3956
3957 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3958 let range = range.to_offset(self);
3959 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3960 grammar.outline_config.as_ref().map(|c| &c.query)
3961 });
3962 let configs = matches
3963 .grammars()
3964 .iter()
3965 .map(|g| g.outline_config.as_ref().unwrap())
3966 .collect::<Vec<_>>();
3967
3968 while let Some(mat) = matches.peek() {
3969 let config = &configs[mat.grammar_index];
3970 let containing_item_node = maybe!({
3971 let item_node = mat.captures.iter().find_map(|cap| {
3972 if cap.index == config.item_capture_ix {
3973 Some(cap.node)
3974 } else {
3975 None
3976 }
3977 })?;
3978
3979 let item_byte_range = item_node.byte_range();
3980 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3981 None
3982 } else {
3983 Some(item_node)
3984 }
3985 });
3986
3987 if let Some(item_node) = containing_item_node {
3988 return Some(
3989 Point::from_ts_point(item_node.start_position())
3990 ..Point::from_ts_point(item_node.end_position()),
3991 );
3992 }
3993
3994 matches.advance();
3995 }
3996 None
3997 }
3998
3999 pub fn outline_items_containing<T: ToOffset>(
4000 &self,
4001 range: Range<T>,
4002 include_extra_context: bool,
4003 theme: Option<&SyntaxTheme>,
4004 ) -> Vec<OutlineItem<Anchor>> {
4005 self.outline_items_containing_internal(
4006 range,
4007 include_extra_context,
4008 theme,
4009 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4010 )
4011 }
4012
4013 pub fn outline_items_as_points_containing<T: ToOffset>(
4014 &self,
4015 range: Range<T>,
4016 include_extra_context: bool,
4017 theme: Option<&SyntaxTheme>,
4018 ) -> Vec<OutlineItem<Point>> {
4019 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4020 range
4021 })
4022 }
4023
4024 fn outline_items_containing_internal<T: ToOffset, U>(
4025 &self,
4026 range: Range<T>,
4027 include_extra_context: bool,
4028 theme: Option<&SyntaxTheme>,
4029 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4030 ) -> Vec<OutlineItem<U>> {
4031 let range = range.to_offset(self);
4032 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4033 grammar.outline_config.as_ref().map(|c| &c.query)
4034 });
4035
4036 let mut items = Vec::new();
4037 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4038 while let Some(mat) = matches.peek() {
4039 let config = matches.grammars()[mat.grammar_index]
4040 .outline_config
4041 .as_ref()
4042 .unwrap();
4043 if let Some(item) =
4044 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4045 {
4046 items.push(item);
4047 } else if let Some(capture) = mat
4048 .captures
4049 .iter()
4050 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4051 {
4052 let capture_range = capture.node.start_position()..capture.node.end_position();
4053 let mut capture_row_range =
4054 capture_range.start.row as u32..capture_range.end.row as u32;
4055 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4056 {
4057 capture_row_range.end -= 1;
4058 }
4059 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4060 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4061 last_row_range.end = capture_row_range.end;
4062 } else {
4063 annotation_row_ranges.push(capture_row_range);
4064 }
4065 } else {
4066 annotation_row_ranges.push(capture_row_range);
4067 }
4068 }
4069 matches.advance();
4070 }
4071
4072 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4073
4074 // Assign depths based on containment relationships and convert ranges via the callback.
4075 let mut item_ends_stack = Vec::<Point>::new();
4076 let mut anchor_items = Vec::new();
4077 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4078 for item in items {
4079 while let Some(last_end) = item_ends_stack.last().copied() {
4080 if last_end < item.range.end {
4081 item_ends_stack.pop();
4082 } else {
4083 break;
4084 }
4085 }
4086
4087 let mut annotation_row_range = None;
4088 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4089 let row_preceding_item = item.range.start.row.saturating_sub(1);
4090 if next_annotation_row_range.end < row_preceding_item {
4091 annotation_row_ranges.next();
4092 } else {
4093 if next_annotation_row_range.end == row_preceding_item {
4094 annotation_row_range = Some(next_annotation_row_range.clone());
4095 annotation_row_ranges.next();
4096 }
4097 break;
4098 }
4099 }
4100
4101 anchor_items.push(OutlineItem {
4102 depth: item_ends_stack.len(),
4103 range: range_callback(self, item.range.clone()),
4104 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4105 text: item.text,
4106 highlight_ranges: item.highlight_ranges,
4107 name_ranges: item.name_ranges,
4108 body_range: item.body_range.map(|r| range_callback(self, r)),
4109 annotation_range: annotation_row_range.map(|annotation_range| {
4110 let point_range = Point::new(annotation_range.start, 0)
4111 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4112 range_callback(self, point_range)
4113 }),
4114 });
4115 item_ends_stack.push(item.range.end);
4116 }
4117
4118 anchor_items
4119 }
4120
4121 fn next_outline_item(
4122 &self,
4123 config: &OutlineConfig,
4124 mat: &SyntaxMapMatch,
4125 range: &Range<usize>,
4126 include_extra_context: bool,
4127 theme: Option<&SyntaxTheme>,
4128 ) -> Option<OutlineItem<Point>> {
4129 let item_node = mat.captures.iter().find_map(|cap| {
4130 if cap.index == config.item_capture_ix {
4131 Some(cap.node)
4132 } else {
4133 None
4134 }
4135 })?;
4136
4137 let item_byte_range = item_node.byte_range();
4138 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4139 return None;
4140 }
4141 let item_point_range = Point::from_ts_point(item_node.start_position())
4142 ..Point::from_ts_point(item_node.end_position());
4143
4144 let mut open_point = None;
4145 let mut close_point = None;
4146
4147 let mut buffer_ranges = Vec::new();
4148 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4149 let mut range = node.start_byte()..node.end_byte();
4150 let start = node.start_position();
4151 if node.end_position().row > start.row {
4152 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4153 }
4154
4155 if !range.is_empty() {
4156 buffer_ranges.push((range, node_is_name));
4157 }
4158 };
4159
4160 for capture in mat.captures {
4161 if capture.index == config.name_capture_ix {
4162 add_to_buffer_ranges(capture.node, true);
4163 } else if Some(capture.index) == config.context_capture_ix
4164 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4165 {
4166 add_to_buffer_ranges(capture.node, false);
4167 } else {
4168 if Some(capture.index) == config.open_capture_ix {
4169 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4170 } else if Some(capture.index) == config.close_capture_ix {
4171 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4172 }
4173 }
4174 }
4175
4176 if buffer_ranges.is_empty() {
4177 return None;
4178 }
4179 let source_range_for_text =
4180 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4181
4182 let mut text = String::new();
4183 let mut highlight_ranges = Vec::new();
4184 let mut name_ranges = Vec::new();
4185 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4186 let mut last_buffer_range_end = 0;
4187 for (buffer_range, is_name) in buffer_ranges {
4188 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4189 if space_added {
4190 text.push(' ');
4191 }
4192 let before_append_len = text.len();
4193 let mut offset = buffer_range.start;
4194 chunks.seek(buffer_range.clone());
4195 for mut chunk in chunks.by_ref() {
4196 if chunk.text.len() > buffer_range.end - offset {
4197 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4198 offset = buffer_range.end;
4199 } else {
4200 offset += chunk.text.len();
4201 }
4202 let style = chunk
4203 .syntax_highlight_id
4204 .zip(theme)
4205 .and_then(|(highlight, theme)| highlight.style(theme));
4206 if let Some(style) = style {
4207 let start = text.len();
4208 let end = start + chunk.text.len();
4209 highlight_ranges.push((start..end, style));
4210 }
4211 text.push_str(chunk.text);
4212 if offset >= buffer_range.end {
4213 break;
4214 }
4215 }
4216 if is_name {
4217 let after_append_len = text.len();
4218 let start = if space_added && !name_ranges.is_empty() {
4219 before_append_len - 1
4220 } else {
4221 before_append_len
4222 };
4223 name_ranges.push(start..after_append_len);
4224 }
4225 last_buffer_range_end = buffer_range.end;
4226 }
4227
4228 Some(OutlineItem {
4229 depth: 0, // We'll calculate the depth later
4230 range: item_point_range,
4231 source_range_for_text: source_range_for_text.to_point(self),
4232 text,
4233 highlight_ranges,
4234 name_ranges,
4235 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4236 annotation_range: None,
4237 })
4238 }
4239
4240 pub fn function_body_fold_ranges<T: ToOffset>(
4241 &self,
4242 within: Range<T>,
4243 ) -> impl Iterator<Item = Range<usize>> + '_ {
4244 self.text_object_ranges(within, TreeSitterOptions::default())
4245 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4246 }
4247
4248 /// For each grammar in the language, runs the provided
4249 /// [`tree_sitter::Query`] against the given range.
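///
/// A minimal sketch (not compiled as a doctest; `snapshot` is assumed, and the outline
/// query is used purely as an example of a per-grammar query):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` for the grammar at `mat.grammar_index`...
///     matches.advance();
/// }
/// ```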
4250 pub fn matches(
4251 &self,
4252 range: Range<usize>,
4253 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4254 ) -> SyntaxMapMatches<'_> {
4255 self.syntax.matches(range, self, query)
4256 }
4257
4258 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4259 /// Hence, it may return more bracket pairs than the range itself contains.
4260 ///
4261 /// Chunks listed in `known_chunks` are omitted, as long as the known version is still current.
4262 /// The resulting bracket match collections are not ordered.
4263 pub fn fetch_bracket_ranges(
4264 &self,
4265 range: Range<usize>,
4266 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4267 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4268 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4269
4270 let known_chunks = match known_chunks {
4271 Some((known_version, known_chunks)) => {
4272 if !tree_sitter_data
4273 .chunks
4274 .version()
4275 .changed_since(known_version)
4276 {
4277 known_chunks.clone()
4278 } else {
4279 HashSet::default()
4280 }
4281 }
4282 None => HashSet::default(),
4283 };
4284
4285 let mut new_bracket_matches = HashMap::default();
4286 let mut all_bracket_matches = HashMap::default();
4287 let mut bracket_matches_to_color = HashMap::default();
4288
4289 for chunk in tree_sitter_data
4290 .chunks
4291 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4292 {
4293 if known_chunks.contains(&chunk.row_range()) {
4294 continue;
4295 }
4296 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4297 continue;
4298 };
4299 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4300
4301 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4302 Some(cached_brackets) => cached_brackets,
4303 None => {
4304 let mut bracket_pairs_ends = Vec::new();
4305 let mut matches =
4306 self.syntax
4307 .matches(chunk_range.clone(), &self.text, |grammar| {
4308 grammar.brackets_config.as_ref().map(|c| &c.query)
4309 });
4310 let configs = matches
4311 .grammars()
4312 .iter()
4313 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4314 .collect::<Vec<_>>();
4315
4316 let chunk_range = chunk_range.clone();
4317 let tree_sitter_matches = iter::from_fn(|| {
4318 while let Some(mat) = matches.peek() {
4319 let mut open = None;
4320 let mut close = None;
4321 let depth = mat.depth;
4322 let config = configs[mat.grammar_index];
4323 let pattern = &config.patterns[mat.pattern_index];
4324 for capture in mat.captures {
4325 if capture.index == config.open_capture_ix {
4326 open = Some(capture.node.byte_range());
4327 } else if capture.index == config.close_capture_ix {
4328 close = Some(capture.node.byte_range());
4329 }
4330 }
4331
4332 matches.advance();
4333
4334 let Some((open_range, close_range)) = open.zip(close) else {
4335 continue;
4336 };
4337
4338 let bracket_range = open_range.start..=close_range.end;
4339 if !bracket_range.overlaps(&chunk_range) {
4340 continue;
4341 }
4342
4343 if !pattern.rainbow_exclude
4344 // Also, certain languages have "brackets" that are not really brackets, e.g. tags, and such a
4345 // "bracket" will match the entire tag with all the text inside.
4346 // For now, avoid highlighting any pair where both brackets are longer than a single char.
4347 // We need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4348 && (open_range.len() == 1 || close_range.len() == 1)
4349 {
4350 // Certain tree-sitter grammars may return more bracket pairs than needed:
4351 // see `test_markdown_bracket_colorization` for a set-up that returns pairs with the same start bracket but different end brackets.
4352 // Pick the pair with the shortest range in case of ambiguity.
4353 match bracket_matches_to_color.entry(open_range.clone()) {
4354 hash_map::Entry::Vacant(v) => {
4355 v.insert(close_range.clone());
4356 }
4357 hash_map::Entry::Occupied(mut o) => {
4358 let previous_close_range = o.get();
4359 let previous_length =
4360 previous_close_range.end - open_range.start;
4361 let new_length = close_range.end - open_range.start;
4362 if new_length < previous_length {
4363 o.insert(close_range.clone());
4364 }
4365 }
4366 }
4367 }
4368 return Some((open_range, close_range, pattern, depth));
4369 }
4370 None
4371 })
4372 .sorted_by_key(|(open_range, _, _, _)| open_range.start)
4373 .collect::<Vec<_>>();
4374
4375 let new_matches = tree_sitter_matches
4376 .into_iter()
4377 .map(|(open_range, close_range, pattern, syntax_layer_depth)| {
4378 let participates_in_colorizing =
4379 bracket_matches_to_color.get(&open_range).is_some_and(
4380 |close_range_to_color| close_range_to_color == &close_range,
4381 );
4382 let color_index = if participates_in_colorizing {
4383 while let Some(&last_bracket_end) = bracket_pairs_ends.last() {
4384 if last_bracket_end <= open_range.start {
4385 bracket_pairs_ends.pop();
4386 } else {
4387 break;
4388 }
4389 }
4390
4391 let bracket_depth = bracket_pairs_ends.len();
4392 bracket_pairs_ends.push(close_range.end);
4393 Some(bracket_depth)
4394 } else {
4395 None
4396 };
4397
4398 BracketMatch {
4399 open_range,
4400 close_range,
4401 syntax_layer_depth,
4402 newline_only: pattern.newline_only,
4403 color_index,
4404 }
4405 })
4406 .collect::<Vec<_>>();
4407
4408 new_bracket_matches.insert(chunk.id, new_matches.clone());
4409 new_matches
4410 }
4411 };
4412 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4413 }
4414
4415 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4416 if latest_tree_sitter_data.chunks.version() == &self.version {
4417 for (chunk_id, new_matches) in new_bracket_matches {
4418 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4419 if old_chunks.is_none() {
4420 *old_chunks = Some(new_matches);
4421 }
4422 }
4423 }
4424
4425 all_bracket_matches
4426 }
4427
4428 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4429 let mut tree_sitter_data = self.tree_sitter_data.lock();
4430 if self
4431 .version
4432 .changed_since(tree_sitter_data.chunks.version())
4433 {
4434 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4435 }
4436 tree_sitter_data
4437 }
4438
4439 pub fn all_bracket_ranges(
4440 &self,
4441 range: Range<usize>,
4442 ) -> impl Iterator<Item = BracketMatch<usize>> {
4443 self.fetch_bracket_ranges(range.clone(), None)
4444 .into_values()
4445 .flatten()
4446 .filter(move |bracket_match| {
4447 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4448 bracket_range.overlaps(&range)
4449 })
4450 }
4451
4452 /// Returns bracket range pairs overlapping or adjacent to `range`
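///
/// A rough sketch of finding the pair closest to a cursor (not compiled as a doctest;
/// `snapshot` and `cursor_offset` are assumed):
///
/// ```ignore
/// let innermost = snapshot
///     .bracket_ranges(cursor_offset..cursor_offset)
///     .filter(|pair| {
///         pair.open_range.start <= cursor_offset && pair.close_range.end >= cursor_offset
///     })
///     .min_by_key(|pair| pair.close_range.end - pair.open_range.start);
/// ```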
4453 pub fn bracket_ranges<T: ToOffset>(
4454 &self,
4455 range: Range<T>,
4456 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4457 // Find bracket pairs that *inclusively* contain the given range.
4458 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4459 self.all_bracket_ranges(range)
4460 .filter(|pair| !pair.newline_only)
4461 }
4462
4463 pub fn debug_variables_query<T: ToOffset>(
4464 &self,
4465 range: Range<T>,
4466 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4467 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4468
4469 let mut matches = self.syntax.matches_with_options(
4470 range.clone(),
4471 &self.text,
4472 TreeSitterOptions::default(),
4473 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4474 );
4475
4476 let configs = matches
4477 .grammars()
4478 .iter()
4479 .map(|grammar| grammar.debug_variables_config.as_ref())
4480 .collect::<Vec<_>>();
4481
4482 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4483
4484 iter::from_fn(move || {
4485 loop {
4486 while let Some(capture) = captures.pop() {
4487 if capture.0.overlaps(&range) {
4488 return Some(capture);
4489 }
4490 }
4491
4492 let mat = matches.peek()?;
4493
4494 let Some(config) = configs[mat.grammar_index].as_ref() else {
4495 matches.advance();
4496 continue;
4497 };
4498
4499 for capture in mat.captures {
4500 let Some(ix) = config
4501 .objects_by_capture_ix
4502 .binary_search_by_key(&capture.index, |e| e.0)
4503 .ok()
4504 else {
4505 continue;
4506 };
4507 let text_object = config.objects_by_capture_ix[ix].1;
4508 let byte_range = capture.node.byte_range();
4509
4510 let mut found = false;
4511 for (range, existing) in captures.iter_mut() {
4512 if existing == &text_object {
4513 range.start = range.start.min(byte_range.start);
4514 range.end = range.end.max(byte_range.end);
4515 found = true;
4516 break;
4517 }
4518 }
4519
4520 if !found {
4521 captures.push((byte_range, text_object));
4522 }
4523 }
4524
4525 matches.advance();
4526 }
4527 })
4528 }
4529
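/// Returns the ranges captured by each grammar's text-object query that overlap the
/// given range, paired with the corresponding [`TextObject`].
///
/// A rough sketch (not compiled as a doctest; `snapshot` and `cursor_offset` are assumed):
///
/// ```ignore
/// let function_body = snapshot
///     .text_object_ranges(cursor_offset..cursor_offset, TreeSitterOptions::default())
///     .find(|(_, obj)| *obj == TextObject::InsideFunction)
///     .map(|(range, _)| range);
/// ```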
4530 pub fn text_object_ranges<T: ToOffset>(
4531 &self,
4532 range: Range<T>,
4533 options: TreeSitterOptions,
4534 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4535 let range =
4536 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4537
4538 let mut matches =
4539 self.syntax
4540 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4541 grammar.text_object_config.as_ref().map(|c| &c.query)
4542 });
4543
4544 let configs = matches
4545 .grammars()
4546 .iter()
4547 .map(|grammar| grammar.text_object_config.as_ref())
4548 .collect::<Vec<_>>();
4549
4550 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4551
4552 iter::from_fn(move || {
4553 loop {
4554 while let Some(capture) = captures.pop() {
4555 if capture.0.overlaps(&range) {
4556 return Some(capture);
4557 }
4558 }
4559
4560 let mat = matches.peek()?;
4561
4562 let Some(config) = configs[mat.grammar_index].as_ref() else {
4563 matches.advance();
4564 continue;
4565 };
4566
4567 for capture in mat.captures {
4568 let Some(ix) = config
4569 .text_objects_by_capture_ix
4570 .binary_search_by_key(&capture.index, |e| e.0)
4571 .ok()
4572 else {
4573 continue;
4574 };
4575 let text_object = config.text_objects_by_capture_ix[ix].1;
4576 let byte_range = capture.node.byte_range();
4577
4578 let mut found = false;
4579 for (range, existing) in captures.iter_mut() {
4580 if existing == &text_object {
4581 range.start = range.start.min(byte_range.start);
4582 range.end = range.end.max(byte_range.end);
4583 found = true;
4584 break;
4585 }
4586 }
4587
4588 if !found {
4589 captures.push((byte_range, text_object));
4590 }
4591 }
4592
4593 matches.advance();
4594 }
4595 })
4596 }
4597
4598 /// Returns enclosing bracket ranges containing the given range
4599 pub fn enclosing_bracket_ranges<T: ToOffset>(
4600 &self,
4601 range: Range<T>,
4602 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4603 let range = range.start.to_offset(self)..range.end.to_offset(self);
4604
4605 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4606 let max_depth = result
4607 .iter()
4608 .map(|mat| mat.syntax_layer_depth)
4609 .max()
4610 .unwrap_or(0);
4611 result.into_iter().filter(move |pair| {
4612 pair.open_range.start <= range.start
4613 && pair.close_range.end >= range.end
4614 && pair.syntax_layer_depth == max_depth
4615 })
4616 }
4617
4618 /// Returns the smallest enclosing pair of bracket ranges containing the given range, or `None` if no brackets contain the range.
4619 ///
4620 /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
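///
/// A rough sketch (not compiled as a doctest; `snapshot` and `selection` are assumed,
/// and the filter shown is only illustrative):
///
/// ```ignore
/// let same_line = |open: Range<usize>, close: Range<usize>| {
///     snapshot.offset_to_point(open.start).row == snapshot.offset_to_point(close.end).row
/// };
/// let pair = snapshot.innermost_enclosing_bracket_ranges(selection, Some(&same_line));
/// ```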
4621 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4622 &self,
4623 range: Range<T>,
4624 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4625 ) -> Option<(Range<usize>, Range<usize>)> {
4626 let range = range.start.to_offset(self)..range.end.to_offset(self);
4627
4628 // Get the ranges of the innermost pair of brackets.
4629 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4630
4631 for pair in self.enclosing_bracket_ranges(range) {
4632 if let Some(range_filter) = range_filter
4633 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4634 {
4635 continue;
4636 }
4637
4638 let len = pair.close_range.end - pair.open_range.start;
4639
4640 if let Some((existing_open, existing_close)) = &result {
4641 let existing_len = existing_close.end - existing_open.start;
4642 if len > existing_len {
4643 continue;
4644 }
4645 }
4646
4647 result = Some((pair.open_range, pair.close_range));
4648 }
4649
4650 result
4651 }
4652
4653 /// Returns offset ranges for any matches of the redaction query.
4654 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4655 /// will be run on the relevant section of the buffer.
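///
/// A minimal sketch (not compiled as a doctest; `snapshot` is assumed):
///
/// ```ignore
/// for redacted in snapshot.redacted_ranges(0..snapshot.len()) {
///     // `redacted` is a byte offset range that should be obscured before display.
/// }
/// ```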
4656 pub fn redacted_ranges<T: ToOffset>(
4657 &self,
4658 range: Range<T>,
4659 ) -> impl Iterator<Item = Range<usize>> + '_ {
4660 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4661 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4662 grammar
4663 .redactions_config
4664 .as_ref()
4665 .map(|config| &config.query)
4666 });
4667
4668 let configs = syntax_matches
4669 .grammars()
4670 .iter()
4671 .map(|grammar| grammar.redactions_config.as_ref())
4672 .collect::<Vec<_>>();
4673
4674 iter::from_fn(move || {
4675 let redacted_range = syntax_matches
4676 .peek()
4677 .and_then(|mat| {
4678 configs[mat.grammar_index].and_then(|config| {
4679 mat.captures
4680 .iter()
4681 .find(|capture| capture.index == config.redaction_capture_ix)
4682 })
4683 })
4684 .map(|mat| mat.node.byte_range());
4685 syntax_matches.advance();
4686 redacted_range
4687 })
4688 }
4689
4690 pub fn injections_intersecting_range<T: ToOffset>(
4691 &self,
4692 range: Range<T>,
4693 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4694 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4695
4696 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4697 grammar
4698 .injection_config
4699 .as_ref()
4700 .map(|config| &config.query)
4701 });
4702
4703 let configs = syntax_matches
4704 .grammars()
4705 .iter()
4706 .map(|grammar| grammar.injection_config.as_ref())
4707 .collect::<Vec<_>>();
4708
4709 iter::from_fn(move || {
4710 let ranges = syntax_matches.peek().and_then(|mat| {
4711 let config = &configs[mat.grammar_index]?;
4712 let content_capture_range = mat.captures.iter().find_map(|capture| {
4713 if capture.index == config.content_capture_ix {
4714 Some(capture.node.byte_range())
4715 } else {
4716 None
4717 }
4718 })?;
4719 let language = self.language_at(content_capture_range.start)?;
4720 Some((content_capture_range, language))
4721 });
4722 syntax_matches.advance();
4723 ranges
4724 })
4725 }
4726
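/// Returns the [`RunnableRange`]s detected by each grammar's runnables query
/// (e.g. test annotations) within the given offset range.
///
/// A rough sketch (not compiled as a doctest; `snapshot` is assumed):
///
/// ```ignore
/// for runnable in snapshot.runnable_ranges(0..snapshot.len()) {
///     // `runnable.run_range` is where a run indicator could be shown;
///     // `runnable.runnable.tags` identifies the matched task tags.
/// }
/// ```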
4727 pub fn runnable_ranges(
4728 &self,
4729 offset_range: Range<usize>,
4730 ) -> impl Iterator<Item = RunnableRange> + '_ {
4731 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4732 grammar.runnable_config.as_ref().map(|config| &config.query)
4733 });
4734
4735 let test_configs = syntax_matches
4736 .grammars()
4737 .iter()
4738 .map(|grammar| grammar.runnable_config.as_ref())
4739 .collect::<Vec<_>>();
4740
4741 iter::from_fn(move || {
4742 loop {
4743 let mat = syntax_matches.peek()?;
4744
4745 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4746 let mut run_range = None;
4747 let full_range = mat.captures.iter().fold(
4748 Range {
4749 start: usize::MAX,
4750 end: 0,
4751 },
4752 |mut acc, next| {
4753 let byte_range = next.node.byte_range();
4754 if acc.start > byte_range.start {
4755 acc.start = byte_range.start;
4756 }
4757 if acc.end < byte_range.end {
4758 acc.end = byte_range.end;
4759 }
4760 acc
4761 },
4762 );
4763 if full_range.start > full_range.end {
4764 // We did not find a full spanning range of this match.
4765 return None;
4766 }
4767 let extra_captures: SmallVec<[_; 1]> =
4768 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4769 test_configs
4770 .extra_captures
4771 .get(capture.index as usize)
4772 .cloned()
4773 .and_then(|tag_name| match tag_name {
4774 RunnableCapture::Named(name) => {
4775 Some((capture.node.byte_range(), name))
4776 }
4777 RunnableCapture::Run => {
4778 let _ = run_range.insert(capture.node.byte_range());
4779 None
4780 }
4781 })
4782 }));
4783 let run_range = run_range?;
4784 let tags = test_configs
4785 .query
4786 .property_settings(mat.pattern_index)
4787 .iter()
4788 .filter_map(|property| {
4789 if *property.key == *"tag" {
4790 property
4791 .value
4792 .as_ref()
4793 .map(|value| RunnableTag(value.to_string().into()))
4794 } else {
4795 None
4796 }
4797 })
4798 .collect();
4799 let extra_captures = extra_captures
4800 .into_iter()
4801 .map(|(range, name)| {
4802 (
4803 name.to_string(),
4804 self.text_for_range(range).collect::<String>(),
4805 )
4806 })
4807 .collect();
4808 // All tags should have the same range.
4809 Some(RunnableRange {
4810 run_range,
4811 full_range,
4812 runnable: Runnable {
4813 tags,
4814 language: mat.language,
4815 buffer: self.remote_id(),
4816 },
4817 extra_captures,
4818 buffer_id: self.remote_id(),
4819 })
4820 });
4821
4822 syntax_matches.advance();
4823 if test_range.is_some() {
4824 // It's fine to short-circuit when .peek()? returns None. However, we don't want this iterator to return None
4825 // just because a match lacked a run marker, so in that case we loop around to the next match.
4826 return test_range;
4827 }
4828 }
4829 })
4830 }
4831
4832 /// Returns selections for peers intersecting the given range, optionally including the local replica's.
4833 #[allow(clippy::type_complexity)]
4834 pub fn selections_in_range(
4835 &self,
4836 range: Range<Anchor>,
4837 include_local: bool,
4838 ) -> impl Iterator<
4839 Item = (
4840 ReplicaId,
4841 bool,
4842 CursorShape,
4843 impl Iterator<Item = &Selection<Anchor>> + '_,
4844 ),
4845 > + '_ {
4846 self.remote_selections
4847 .iter()
4848 .filter(move |(replica_id, set)| {
4849 (include_local || **replica_id != self.text.replica_id())
4850 && !set.selections.is_empty()
4851 })
4852 .map(move |(replica_id, set)| {
4853 let start_ix = match set.selections.binary_search_by(|probe| {
4854 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4855 }) {
4856 Ok(ix) | Err(ix) => ix,
4857 };
4858 let end_ix = match set.selections.binary_search_by(|probe| {
4859 probe.start.cmp(&range.end, self).then(Ordering::Less)
4860 }) {
4861 Ok(ix) | Err(ix) => ix,
4862 };
4863
4864 (
4865 *replica_id,
4866 set.line_mode,
4867 set.cursor_shape,
4868 set.selections[start_ix..end_ix].iter(),
4869 )
4870 })
4871 }
4872
4873 /// Returns whether the buffer contains any diagnostics.
4874 pub fn has_diagnostics(&self) -> bool {
4875 !self.diagnostics.is_empty()
4876 }
4877
4878 /// Returns all the diagnostics intersecting the given range.
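///
/// A rough sketch of counting errors in a row range (not compiled as a doctest;
/// `snapshot` and `visible_rows: Range<Point>` are assumed):
///
/// ```ignore
/// let error_count = snapshot
///     .diagnostics_in_range::<_, Point>(visible_rows, false)
///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
///     .count();
/// ```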
4879 pub fn diagnostics_in_range<'a, T, O>(
4880 &'a self,
4881 search_range: Range<T>,
4882 reversed: bool,
4883 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4884 where
4885 T: 'a + Clone + ToOffset,
4886 O: 'a + FromAnchor,
4887 {
4888 let mut iterators: Vec<_> = self
4889 .diagnostics
4890 .iter()
4891 .map(|(_, collection)| {
4892 collection
4893 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4894 .peekable()
4895 })
4896 .collect();
4897
4898 std::iter::from_fn(move || {
4899 let (next_ix, _) = iterators
4900 .iter_mut()
4901 .enumerate()
4902 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4903 .min_by(|(_, a), (_, b)| {
4904 let cmp = a
4905 .range
4906 .start
4907 .cmp(&b.range.start, self)
4908 // when range is equal, sort by diagnostic severity
4909 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4910 // and stabilize order with group_id
4911 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4912 if reversed { cmp.reverse() } else { cmp }
4913 })?;
4914 iterators[next_ix]
4915 .next()
4916 .map(
4917 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4918 diagnostic,
4919 range: FromAnchor::from_anchor(&range.start, self)
4920 ..FromAnchor::from_anchor(&range.end, self),
4921 },
4922 )
4923 })
4924 }
4925
4926 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4927 /// should be used instead.
4928 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4929 &self.diagnostics
4930 }
4931
4932 /// Returns all the diagnostic groups associated with the given
4933 /// language server ID. If no language server ID is provided,
4934 /// all diagnostic groups are returned.
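///
/// A minimal sketch (not compiled as a doctest; `snapshot` and `server_id` are assumed):
///
/// ```ignore
/// for (id, group) in snapshot.diagnostic_groups(Some(server_id)) {
///     let primary = &group.entries[group.primary_ix];
///     // ...
/// }
/// ```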
4935 pub fn diagnostic_groups(
4936 &self,
4937 language_server_id: Option<LanguageServerId>,
4938 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4939 let mut groups = Vec::new();
4940
4941 if let Some(language_server_id) = language_server_id {
4942 if let Ok(ix) = self
4943 .diagnostics
4944 .binary_search_by_key(&language_server_id, |e| e.0)
4945 {
4946 self.diagnostics[ix]
4947 .1
4948 .groups(language_server_id, &mut groups, self);
4949 }
4950 } else {
4951 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4952 diagnostics.groups(*language_server_id, &mut groups, self);
4953 }
4954 }
4955
4956 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4957 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4958 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4959 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4960 });
4961
4962 groups
4963 }
4964
4965 /// Returns an iterator over the diagnostics for the given group.
4966 pub fn diagnostic_group<O>(
4967 &self,
4968 group_id: usize,
4969 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4970 where
4971 O: FromAnchor + 'static,
4972 {
4973 self.diagnostics
4974 .iter()
4975 .flat_map(move |(_, set)| set.group(group_id, self))
4976 }
4977
4978 /// An integer version number that accounts for all updates besides
4979 /// the buffer's text itself (which is versioned via a version vector).
4980 pub fn non_text_state_update_count(&self) -> usize {
4981 self.non_text_state_update_count
4982 }
4983
4984 /// An integer version that changes when the buffer's syntax changes.
4985 pub fn syntax_update_count(&self) -> usize {
4986 self.syntax.update_count()
4987 }
4988
4989 /// Returns a snapshot of the underlying file.
4990 pub fn file(&self) -> Option<&Arc<dyn File>> {
4991 self.file.as_ref()
4992 }
4993
4994 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4995 if let Some(file) = self.file() {
4996 if file.path().file_name().is_none() || include_root {
4997 Some(file.full_path(cx).to_string_lossy().into_owned())
4998 } else {
4999 Some(file.path().display(file.path_style(cx)).to_string())
5000 }
5001 } else {
5002 None
5003 }
5004 }
5005
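/// Returns the distinct words found in the given buffer range, keyed by their text,
/// optionally filtered by a fuzzy query (see [`WordsQuery`]).
///
/// A rough sketch (not compiled as a doctest; `snapshot` is assumed):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```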
5006 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5007 let query_str = query.fuzzy_contents;
5008 if query_str.is_some_and(|query| query.is_empty()) {
5009 return BTreeMap::default();
5010 }
5011
5012 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5013 language,
5014 override_id: None,
5015 }));
5016
5017 let mut query_ix = 0;
5018 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5019 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5020
5021 let mut words = BTreeMap::default();
5022 let mut current_word_start_ix = None;
5023 let mut chunk_ix = query.range.start;
5024 for chunk in self.chunks(query.range, false) {
5025 for (i, c) in chunk.text.char_indices() {
5026 let ix = chunk_ix + i;
5027 if classifier.is_word(c) {
5028 if current_word_start_ix.is_none() {
5029 current_word_start_ix = Some(ix);
5030 }
5031
5032 if let Some(query_chars) = &query_chars
5033 && query_ix < query_len
5034 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5035 {
5036 query_ix += 1;
5037 }
5038 continue;
5039 } else if let Some(word_start) = current_word_start_ix.take()
5040 && query_ix == query_len
5041 {
5042 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5043 let mut word_text = self.text_for_range(word_start..ix).peekable();
5044 let first_char = word_text
5045 .peek()
5046 .and_then(|first_chunk| first_chunk.chars().next());
5047 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
5048 if !query.skip_digits
5049 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5050 {
5051 words.insert(word_text.collect(), word_range);
5052 }
5053 }
5054 query_ix = 0;
5055 }
5056 chunk_ix += chunk.text.len();
5057 }
5058
5059 words
5060 }
5061}
5062
5063pub struct WordsQuery<'a> {
5064 /// Only return words that contain all of the characters of this fuzzy string, in order.
5065 pub fuzzy_contents: Option<&'a str>,
5066 /// Skips words that start with a digit.
5067 pub skip_digits: bool,
5068 /// The buffer offset range to search for words in.
5069 pub range: Range<usize>,
5070}
5071
5072fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5073 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5074}
5075
5076fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5077 let mut result = IndentSize::spaces(0);
5078 for c in text {
5079 let kind = match c {
5080 ' ' => IndentKind::Space,
5081 '\t' => IndentKind::Tab,
5082 _ => break,
5083 };
5084 if result.len == 0 {
5085 result.kind = kind;
5086 }
5087 result.len += 1;
5088 }
5089 result
5090}
5091
5092impl Clone for BufferSnapshot {
5093 fn clone(&self) -> Self {
5094 Self {
5095 text: self.text.clone(),
5096 syntax: self.syntax.clone(),
5097 file: self.file.clone(),
5098 remote_selections: self.remote_selections.clone(),
5099 diagnostics: self.diagnostics.clone(),
5100 language: self.language.clone(),
5101 tree_sitter_data: self.tree_sitter_data.clone(),
5102 non_text_state_update_count: self.non_text_state_update_count,
5103 }
5104 }
5105}
5106
5107impl Deref for BufferSnapshot {
5108 type Target = text::BufferSnapshot;
5109
5110 fn deref(&self) -> &Self::Target {
5111 &self.text
5112 }
5113}
5114
5115unsafe impl Send for BufferChunks<'_> {}
5116
5117impl<'a> BufferChunks<'a> {
5118 pub(crate) fn new(
5119 text: &'a Rope,
5120 range: Range<usize>,
5121 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5122 diagnostics: bool,
5123 buffer_snapshot: Option<&'a BufferSnapshot>,
5124 ) -> Self {
5125 let mut highlights = None;
5126 if let Some((captures, highlight_maps)) = syntax {
5127 highlights = Some(BufferChunkHighlights {
5128 captures,
5129 next_capture: None,
5130 stack: Default::default(),
5131 highlight_maps,
5132 })
5133 }
5134
5135 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5136 let chunks = text.chunks_in_range(range.clone());
5137
5138 let mut this = BufferChunks {
5139 range,
5140 buffer_snapshot,
5141 chunks,
5142 diagnostic_endpoints,
5143 error_depth: 0,
5144 warning_depth: 0,
5145 information_depth: 0,
5146 hint_depth: 0,
5147 unnecessary_depth: 0,
5148 underline: true,
5149 highlights,
5150 };
5151 this.initialize_diagnostic_endpoints();
5152 this
5153 }
5154
5155 /// Seeks to the given byte range in the buffer.
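///
/// A minimal sketch of reusing one iterator for several sub-ranges (not compiled as a
/// doctest; `snapshot` and `sub_range` are assumed):
///
/// ```ignore
/// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
/// chunks.seek(sub_range.clone());
/// for chunk in chunks.by_ref() {
///     // Consume chunks for `sub_range`, then `seek` again for the next sub-range.
/// }
/// ```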
5156 pub fn seek(&mut self, range: Range<usize>) {
5157 let old_range = std::mem::replace(&mut self.range, range.clone());
5158 self.chunks.set_range(self.range.clone());
5159 if let Some(highlights) = self.highlights.as_mut() {
5160 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5161 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5162 highlights
5163 .stack
5164 .retain(|(end_offset, _)| *end_offset > range.start);
5165 if let Some(capture) = &highlights.next_capture
5166 && range.start >= capture.node.start_byte()
5167 {
5168 let next_capture_end = capture.node.end_byte();
5169 if range.start < next_capture_end {
5170 highlights.stack.push((
5171 next_capture_end,
5172 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5173 ));
5174 }
5175 highlights.next_capture.take();
5176 }
5177 } else if let Some(snapshot) = self.buffer_snapshot {
5178 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5179 *highlights = BufferChunkHighlights {
5180 captures,
5181 next_capture: None,
5182 stack: Default::default(),
5183 highlight_maps,
5184 };
5185 } else {
5186 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5187 // Seeking such BufferChunks is not supported.
5188 debug_assert!(
5189 false,
5190 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5191 );
5192 }
5193
5194 highlights.captures.set_byte_range(self.range.clone());
5195 self.initialize_diagnostic_endpoints();
5196 }
5197 }
5198
5199 fn initialize_diagnostic_endpoints(&mut self) {
5200 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5201 && let Some(buffer) = self.buffer_snapshot
5202 {
5203 let mut diagnostic_endpoints = Vec::new();
5204 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5205 diagnostic_endpoints.push(DiagnosticEndpoint {
5206 offset: entry.range.start,
5207 is_start: true,
5208 severity: entry.diagnostic.severity,
5209 is_unnecessary: entry.diagnostic.is_unnecessary,
5210 underline: entry.diagnostic.underline,
5211 });
5212 diagnostic_endpoints.push(DiagnosticEndpoint {
5213 offset: entry.range.end,
5214 is_start: false,
5215 severity: entry.diagnostic.severity,
5216 is_unnecessary: entry.diagnostic.is_unnecessary,
5217 underline: entry.diagnostic.underline,
5218 });
5219 }
5220 diagnostic_endpoints
5221 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5222 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5223 self.hint_depth = 0;
5224 self.error_depth = 0;
5225 self.warning_depth = 0;
5226 self.information_depth = 0;
5227 }
5228 }
5229
5230 /// The current byte offset in the buffer.
5231 pub fn offset(&self) -> usize {
5232 self.range.start
5233 }
5234
5235 pub fn range(&self) -> Range<usize> {
5236 self.range.clone()
5237 }
5238
5239 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5240 let depth = match endpoint.severity {
5241 DiagnosticSeverity::ERROR => &mut self.error_depth,
5242 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5243 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5244 DiagnosticSeverity::HINT => &mut self.hint_depth,
5245 _ => return,
5246 };
5247 if endpoint.is_start {
5248 *depth += 1;
5249 } else {
5250 *depth -= 1;
5251 }
5252
5253 if endpoint.is_unnecessary {
5254 if endpoint.is_start {
5255 self.unnecessary_depth += 1;
5256 } else {
5257 self.unnecessary_depth -= 1;
5258 }
5259 }
5260 }
5261
5262 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5263 if self.error_depth > 0 {
5264 Some(DiagnosticSeverity::ERROR)
5265 } else if self.warning_depth > 0 {
5266 Some(DiagnosticSeverity::WARNING)
5267 } else if self.information_depth > 0 {
5268 Some(DiagnosticSeverity::INFORMATION)
5269 } else if self.hint_depth > 0 {
5270 Some(DiagnosticSeverity::HINT)
5271 } else {
5272 None
5273 }
5274 }
5275
5276 fn current_code_is_unnecessary(&self) -> bool {
5277 self.unnecessary_depth > 0
5278 }
5279}
5280
5281impl<'a> Iterator for BufferChunks<'a> {
5282 type Item = Chunk<'a>;
5283
5284 fn next(&mut self) -> Option<Self::Item> {
5285 let mut next_capture_start = usize::MAX;
5286 let mut next_diagnostic_endpoint = usize::MAX;
5287
5288 if let Some(highlights) = self.highlights.as_mut() {
5289 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5290 if *parent_capture_end <= self.range.start {
5291 highlights.stack.pop();
5292 } else {
5293 break;
5294 }
5295 }
5296
5297 if highlights.next_capture.is_none() {
5298 highlights.next_capture = highlights.captures.next();
5299 }
5300
5301 while let Some(capture) = highlights.next_capture.as_ref() {
5302 if self.range.start < capture.node.start_byte() {
5303 next_capture_start = capture.node.start_byte();
5304 break;
5305 } else {
5306 let highlight_id =
5307 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5308 highlights
5309 .stack
5310 .push((capture.node.end_byte(), highlight_id));
5311 highlights.next_capture = highlights.captures.next();
5312 }
5313 }
5314 }
5315
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

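        // Slice the current text chunk at the nearest capture or diagnostic boundary,
        // carrying the corresponding tab and character bitmaps along with it.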
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// or enlarged by `size` in the given `direction`. When the two sizes have
    /// different kinds, the indent is left unchanged, except that growing an
    /// empty indent adopts `size` wholesale.
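    ///
    /// A rough illustration (not compiled as a doctest; the `language::` import
    /// path is an assumption):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    /// use language::IndentSize;
    ///
    /// // Growing a 4-space indent by 2 spaces yields 6 spaces.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(grown.len, 6);
    ///
    /// // Shrinking a tab indent by spaces leaves it unchanged, because the kinds differ.
    /// let unchanged = IndentSize::tab().with_delta(Ordering::Less, IndentSize::spaces(1));
    /// assert_eq!(unchanged.len, 1);
    /// ```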
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

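    /// The width of this indent in columns, counting each tab as `tab_size`
    /// columns. For example, two tabs with a tab size of 4 occupy 8 columns.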
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

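/// Coalesces an ascending sequence of `u32` values into contiguous ranges, starting a
/// new range whenever a value is not consecutive with the previous one or the current
/// range has reached `max_len`.
///
/// A sketch of the expected behavior (illustrative only; this helper is crate-private,
/// so the example is not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```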
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

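/// Classifies characters as words, whitespace, or punctuation, optionally consulting a
/// language scope's configured word characters for the chosen context.
///
/// A rough usage sketch (illustrative only; not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Word); // punctuation folds into words
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```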
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

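    /// Classifies `c` using the given `ignore_punctuation` flag instead of the
    /// classifier's own setting: alphanumerics and `_` are always words, then any
    /// scope-specific characters (selected by the scope context) count as words,
    /// then whitespace, and everything else is punctuation unless punctuation is
    /// being ignored.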
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
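///
/// A small illustration (not compiled as a doctest; the offsets are worked out for the
/// literal below):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // The two spaces after "{" and the tab after "1;" are reported.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 28..29]);
/// ```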
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}