pub mod row_chunk;

use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    row_chunk::RowChunks,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
    unified_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
pub use clock::ReplicaId;
use clock::{Global, Lamport};
use collections::{HashMap, HashSet};
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use itertools::Itertools;
use lsp::{LanguageServerId, NumberOrString};
use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet, hash_map},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
}

#[derive(Debug, Clone)]
pub struct TreeSitterData {
    chunks: RowChunks,
    brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
}

const MAX_ROWS_IN_A_CHUNK: u32 = 50;

impl TreeSitterData {
    fn clear(&mut self) {
        self.brackets_by_chunks = vec![None; self.chunks.len()];
    }

    fn new(snapshot: text::BufferSnapshot) -> Self {
        let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
        Self {
            brackets_by_chunks: vec![None; chunks.len()],
            chunks,
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message (in markdown format)
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Quick separation of diagnostics groups based by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// has its indentation adjusted by `b - a`.
        original_indent_columns: Vec<Option<u32>>,
    },
}
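
// A worked illustration of the `Block` adjustment described above (an added
// sketch, not part of the original source): if a copied block's first line was
// originally at column 4 (`a = 4`) and auto-indent places it at column 8
// (`b = 8`), every line of the insertion shifts by `b - a = 4` columns, so a
// line that was at column 10 ends up at column 14.
//
//     let original_indent_column: u32 = 4;
//     let auto_indented_first_line_column: u32 = 8;
//     let delta = auto_indented_first_line_column - original_indent_column; // 4
//     assert_eq!(10 + delta, 14);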

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of character indices in this chunk.
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, trimmed of leading whitespace unless a highlight
    /// starts within it, along with a boolean indicating whether more lines follow.
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}
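
// A hypothetical example of `first_line_preview` behavior (added sketch, not
// part of the original source): with no highlight starting inside the leading
// whitespace, the first line is trimmed and the flag reports remaining lines.
//
//     // highlighted.text == "    let x = 1;\nlet y = 2;", highlighted.highlights is empty
//     let (preview, has_more) = highlighted.first_line_preview();
//     assert_eq!(preview.text.as_ref(), "let x = 1;");
//     assert!(has_more);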

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first.start.to_point(&self.old_snapshot);
        let old_end = last.end.to_point(&self.old_snapshot);
        let new_end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        let start = Point::new(start.row.saturating_sub(3), 0);
        let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
        let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());

        Some(unified_diff(
            &self
                .old_snapshot
                .text_for_range(start..old_end)
                .collect::<String>(),
            &self
                .applied_edits_snapshot
                .text_for_range(start..new_end)
                .collect::<String>(),
        ))
    }

    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, impl AsRef<str>)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let visible_range_in_preview_snapshot =
            visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot =
                edit_new_end_in_preview_snapshot - edit_text.as_ref().len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.as_ref().is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
        cx.new(|cx| {
            let mut buffer = Buffer::local_normalized(
                self.applied_edits_snapshot.as_rope().clone(),
                self.applied_edits_snapshot.line_ending(),
                cx,
            );
            buffer.set_language(self.syntax_snapshot.root_language(), cx);
            buffer
        })
    }

    pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range)
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch<T> {
    pub open_range: Range<T>,
    pub close_range: Range<T>,
    pub newline_only: bool,
    pub syntax_layer_depth: usize,
    pub color_index: Option<usize>,
}

impl<T> BracketMatch<T> {
    pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
        (self.open_range, self.close_range)
    }
}

impl Buffer {
    /// Create a new buffer with the given base text.
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
    pub fn with_language_immediate(
        mut self,
        language: Arc<Language>,
        cx: &mut Context<Self>,
    ) -> Self {
        self.set_language_immediate(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        let tree_sitter_data = TreeSitterData::new(snapshot);
        Self {
            saved_mtime,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }

    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                    .snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            let tree_sitter_data = TreeSitterData::new(text.clone());
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
                language,
                non_text_state_update_count: 0,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(
            ReplicaId::LOCAL,
            buffer_id,
            Default::default(),
            Rope::new(),
        )
        .snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: self.tree_sitter_data.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }
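
    // Sketch of the intended usage (an assumption, not from the original
    // source): `BufferSnapshot` is immutable and cheap to create, so it can be
    // moved onto a background task for read-only work while the buffer keeps
    // changing on the main thread.
    //
    //     let snapshot = buffer.read(cx).snapshot();
    //     cx.background_spawn(async move {
    //         let line_count = snapshot.max_point().row + 1;
    //         // ... perform read-only analysis on `snapshot` ...
    //     })
    //     .detach();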

    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx, true);

            branch
        })
    }

    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }
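
    // Hedged usage sketch for `preview_edits` (assumed, not from the original
    // source; `range` is a hypothetical `Range<Anchor>`): the resulting
    // `EditPreview` can be rendered with `highlight_edits` or turned into a
    // unified diff against the same edits.
    //
    //     let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
    //         vec![(range, Arc::from("new text"))].into();
    //     let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    //     let diff_text = preview.as_unified_diff(&edits);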

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }
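
    // A hedged sketch of the branch workflow (assumed, not from the original
    // source): edits made on a branch buffer can be merged back into its base,
    // either selectively by ranges or entirely by passing an empty `Vec`.
    //
    //     let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
    //     branch.update(cx, |buffer, cx| {
    //         buffer.edit([(0..0, "// new header\n")], None, cx);
    //         buffer.merge_into_base(Vec::new(), cx);
    //     });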

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
    pub fn set_language_immediate(
        &mut self,
        language: Option<Arc<Language>>,
        cx: &mut Context<Self>,
    ) {
        self.set_language_(language, true, cx);
    }

    fn set_language_(
        &mut self,
        language: Option<Arc<Language>>,
        may_block: bool,
        cx: &mut Context<Self>,
    ) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx, may_block);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the line ending type to the buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        if self.capability != capability {
            self.capability = capability;
            cx.emit(BufferEvent::CapabilityChanged)
        }
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }
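
    // Hedged usage sketch (an assumption, not part of the original source):
    // the receiver returned by `reload` resolves with the reload transaction,
    // if one was applied, once the file has been re-read from disk.
    //
    //     let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    //     if let Ok(Some(transaction)) = reloaded.await {
    //         // the buffer's contents now match the on-disk text
    //     }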
1496
1497 /// This method is called to signal that the buffer has been reloaded.
1498 pub fn did_reload(
1499 &mut self,
1500 version: clock::Global,
1501 line_ending: LineEnding,
1502 mtime: Option<MTime>,
1503 cx: &mut Context<Self>,
1504 ) {
1505 self.saved_version = version;
1506 self.has_unsaved_edits
1507 .set((self.saved_version.clone(), false));
1508 self.text.set_line_ending(line_ending);
1509 self.saved_mtime = mtime;
1510 cx.emit(BufferEvent::Reloaded);
1511 cx.notify();
1512 }
1513
1514 /// Updates the [`File`] backing this buffer. This should be called when
1515 /// the file has changed or has been deleted.
1516 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1517 let was_dirty = self.is_dirty();
1518 let mut file_changed = false;
1519
1520 if let Some(old_file) = self.file.as_ref() {
1521 if new_file.path() != old_file.path() {
1522 file_changed = true;
1523 }
1524
1525 let old_state = old_file.disk_state();
1526 let new_state = new_file.disk_state();
1527 if old_state != new_state {
1528 file_changed = true;
1529 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1530 cx.emit(BufferEvent::ReloadNeeded)
1531 }
1532 }
1533 } else {
1534 file_changed = true;
1535 };
1536
1537 self.file = Some(new_file);
1538 if file_changed {
1539 self.was_changed();
1540 self.non_text_state_update_count += 1;
1541 if was_dirty != self.is_dirty() {
1542 cx.emit(BufferEvent::DirtyChanged);
1543 }
1544 cx.emit(BufferEvent::FileHandleChanged);
1545 cx.notify();
1546 }
1547 }
1548
1549 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1550 Some(self.branch_state.as_ref()?.base_buffer.clone())
1551 }
1552
1553 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1554 pub fn language(&self) -> Option<&Arc<Language>> {
1555 self.language.as_ref()
1556 }
1557
1558 /// Returns the [`Language`] at the given location.
1559 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1560 let offset = position.to_offset(self);
1561 let mut is_first = true;
1562 let start_anchor = self.anchor_before(offset);
1563 let end_anchor = self.anchor_after(offset);
1564 self.syntax_map
1565 .lock()
1566 .layers_for_range(offset..offset, &self.text, false)
1567 .filter(|layer| {
1568 if is_first {
1569 is_first = false;
1570 return true;
1571 }
1572
1573 layer
1574 .included_sub_ranges
1575 .map(|sub_ranges| {
1576 sub_ranges.iter().any(|sub_range| {
1577 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1578 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1579 !is_before_start && !is_after_end
1580 })
1581 })
1582 .unwrap_or(true)
1583 })
1584 .last()
1585 .map(|info| info.language.clone())
1586 .or_else(|| self.language.clone())
1587 }
1588
1589 /// Returns each [`Language`] for the active syntax layers at the given location.
1590 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1591 let offset = position.to_offset(self);
1592 let mut languages: Vec<Arc<Language>> = self
1593 .syntax_map
1594 .lock()
1595 .layers_for_range(offset..offset, &self.text, false)
1596 .map(|info| info.language.clone())
1597 .collect();
1598
1599 if languages.is_empty()
1600 && let Some(buffer_language) = self.language()
1601 {
1602 languages.push(buffer_language.clone());
1603 }
1604
1605 languages
1606 }
1607
1608 /// An integer version number that accounts for all updates besides
1609 /// the buffer's text itself (which is versioned via a version vector).
1610 pub fn non_text_state_update_count(&self) -> usize {
1611 self.non_text_state_update_count
1612 }
1613
1614 /// Whether the buffer is being parsed in the background.
1615 #[cfg(any(test, feature = "test-support"))]
1616 pub fn is_parsing(&self) -> bool {
1617 self.reparse.is_some()
1618 }
1619
1620 /// Indicates whether the buffer contains any regions that may be
1621 /// written in a language that hasn't been loaded yet.
1622 pub fn contains_unknown_injections(&self) -> bool {
1623 self.syntax_map.lock().contains_unknown_injections()
1624 }
1625
1626 #[cfg(any(test, feature = "test-support"))]
1627 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1628 self.sync_parse_timeout = timeout;
1629 }
1630
1631 /// Called after an edit to synchronize the buffer's main parse tree with
1632 /// the buffer's new underlying state.
1633 ///
1634 /// Locks the syntax map and interpolates the edits since the last reparse
1635 /// into the foreground syntax tree.
1636 ///
1637 /// Then takes a stable snapshot of the syntax map before unlocking it.
1638 /// The snapshot with the interpolated edits is sent to a background thread,
1639 /// where we ask Tree-sitter to perform an incremental parse.
1640 ///
1641 /// Meanwhile, in the foreground if `may_block` is true, we block the main
1642 /// thread for up to 1ms waiting on the parse to complete. As soon as it
1643 /// completes, we proceed synchronously, unless a 1ms timeout elapses.
1644 ///
1645 /// If we time out waiting on the parse, we spawn a second task waiting
1646 /// until the parse does complete and return with the interpolated tree still
1647 /// in the foreground. When the background parse completes, call back into
1648 /// the main thread and assign the foreground parse state.
1649 ///
1650 /// If the buffer or grammar changed since the start of the background parse,
1651 /// initiate an additional reparse recursively. To avoid concurrent parses
1652 /// for the same buffer, we only initiate a new parse if we are not already
1653 /// parsing in the background.
1654 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1655 if self.reparse.is_some() {
1656 return;
1657 }
1658 let language = if let Some(language) = self.language.clone() {
1659 language
1660 } else {
1661 return;
1662 };
1663
1664 let text = self.text_snapshot();
1665 let parsed_version = self.version();
1666
1667 let mut syntax_map = self.syntax_map.lock();
1668 syntax_map.interpolate(&text);
1669 let language_registry = syntax_map.language_registry();
1670 let mut syntax_snapshot = syntax_map.snapshot();
1671 drop(syntax_map);
1672
1673 let parse_task = cx.background_spawn({
1674 let language = language.clone();
1675 let language_registry = language_registry.clone();
1676 async move {
1677 syntax_snapshot.reparse(&text, language_registry, language);
1678 syntax_snapshot
1679 }
1680 });
1681
1682 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1683 if may_block {
1684 match cx
1685 .background_executor()
1686 .block_with_timeout(self.sync_parse_timeout, parse_task)
1687 {
1688 Ok(new_syntax_snapshot) => {
1689 self.did_finish_parsing(new_syntax_snapshot, cx);
1690 self.reparse = None;
1691 }
1692 Err(parse_task) => {
1693 self.reparse = Some(cx.spawn(async move |this, cx| {
1694 let new_syntax_map = cx.background_spawn(parse_task).await;
1695 this.update(cx, move |this, cx| {
1696 let grammar_changed = || {
1697 this.language.as_ref().is_none_or(|current_language| {
1698 !Arc::ptr_eq(&language, current_language)
1699 })
1700 };
1701 let language_registry_changed = || {
1702 new_syntax_map.contains_unknown_injections()
1703 && language_registry.is_some_and(|registry| {
1704 registry.version()
1705 != new_syntax_map.language_registry_version()
1706 })
1707 };
1708 let parse_again = this.version.changed_since(&parsed_version)
1709 || language_registry_changed()
1710 || grammar_changed();
1711 this.did_finish_parsing(new_syntax_map, cx);
1712 this.reparse = None;
1713 if parse_again {
1714 this.reparse(cx, false);
1715 }
1716 })
1717 .ok();
1718 }));
1719 }
1720 }
1721 } else {
1722 self.reparse = Some(cx.spawn(async move |this, cx| {
1723 let new_syntax_map = cx.background_spawn(parse_task).await;
1724 this.update(cx, move |this, cx| {
1725 let grammar_changed = || {
1726 this.language.as_ref().is_none_or(|current_language| {
1727 !Arc::ptr_eq(&language, current_language)
1728 })
1729 };
1730 let language_registry_changed = || {
1731 new_syntax_map.contains_unknown_injections()
1732 && language_registry.is_some_and(|registry| {
1733 registry.version() != new_syntax_map.language_registry_version()
1734 })
1735 };
1736 let parse_again = this.version.changed_since(&parsed_version)
1737 || language_registry_changed()
1738 || grammar_changed();
1739 this.did_finish_parsing(new_syntax_map, cx);
1740 this.reparse = None;
1741 if parse_again {
1742 this.reparse(cx, false);
1743 }
1744 })
1745 .ok();
1746 }));
1747 }
1748 }
1749
1750 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1751 self.was_changed();
1752 self.non_text_state_update_count += 1;
1753 self.syntax_map.lock().did_parse(syntax_snapshot);
1754 self.request_autoindent(cx);
1755 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1756 self.tree_sitter_data.lock().clear();
1757 cx.emit(BufferEvent::Reparsed);
1758 cx.notify();
1759 }
1760
1761 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1762 self.parse_status.1.clone()
1763 }
1764
    /// Returns a future that resolves once the buffer is no longer parsing.
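    ///
    /// A small sketch of awaiting this in an async context (not compiled here;
    /// `buffer` and `cx` are assumed to come from the surrounding test or task):
    ///
    /// ```ignore
    /// let parsed = buffer.read_with(cx, |buffer, _| buffer.parsing_idle());
    /// parsed.await;
    /// ```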
1766 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1767 let mut parse_status = self.parse_status();
1768 async move {
1769 while *parse_status.borrow() != ParseStatus::Idle {
1770 if parse_status.changed().await.is_err() {
1771 break;
1772 }
1773 }
1774 }
1775 }
1776
1777 /// Assign to the buffer a set of diagnostics created by a given language server.
1778 pub fn update_diagnostics(
1779 &mut self,
1780 server_id: LanguageServerId,
1781 diagnostics: DiagnosticSet,
1782 cx: &mut Context<Self>,
1783 ) {
1784 let lamport_timestamp = self.text.lamport_clock.tick();
1785 let op = Operation::UpdateDiagnostics {
1786 server_id,
1787 diagnostics: diagnostics.iter().cloned().collect(),
1788 lamport_timestamp,
1789 };
1790
1791 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1792 self.send_operation(op, true, cx);
1793 }
1794
1795 pub fn buffer_diagnostics(
1796 &self,
1797 for_server: Option<LanguageServerId>,
1798 ) -> Vec<&DiagnosticEntry<Anchor>> {
1799 match for_server {
1800 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1801 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1802 Err(_) => Vec::new(),
1803 },
1804 None => self
1805 .diagnostics
1806 .iter()
1807 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1808 .collect(),
1809 }
1810 }
1811
1812 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1813 if let Some(indent_sizes) = self.compute_autoindents() {
1814 let indent_sizes = cx.background_spawn(indent_sizes);
1815 match cx
1816 .background_executor()
1817 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1818 {
1819 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1820 Err(indent_sizes) => {
1821 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1822 let indent_sizes = indent_sizes.await;
1823 this.update(cx, |this, cx| {
1824 this.apply_autoindents(indent_sizes, cx);
1825 })
1826 .ok();
1827 }));
1828 }
1829 }
1830 } else {
1831 self.autoindent_requests.clear();
1832 for tx in self.wait_for_autoindent_txs.drain(..) {
1833 tx.send(()).ok();
1834 }
1835 }
1836 }
1837
1838 fn compute_autoindents(
1839 &self,
1840 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1841 let max_rows_between_yields = 100;
1842 let snapshot = self.snapshot();
1843 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1844 return None;
1845 }
1846
1847 let autoindent_requests = self.autoindent_requests.clone();
1848 Some(async move {
1849 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1850 for request in autoindent_requests {
1851 // Resolve each edited range to its row in the current buffer and in the
1852 // buffer before this batch of edits.
1853 let mut row_ranges = Vec::new();
1854 let mut old_to_new_rows = BTreeMap::new();
1855 let mut language_indent_sizes_by_new_row = Vec::new();
1856 for entry in &request.entries {
1857 let position = entry.range.start;
1858 let new_row = position.to_point(&snapshot).row;
1859 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1860 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1861
1862 if !entry.first_line_is_new {
1863 let old_row = position.to_point(&request.before_edit).row;
1864 old_to_new_rows.insert(old_row, new_row);
1865 }
1866 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1867 }
1868
1869 // Build a map containing the suggested indentation for each of the edited lines
1870 // with respect to the state of the buffer before these edits. This map is keyed
1871 // by the rows for these lines in the current state of the buffer.
1872 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1873 let old_edited_ranges =
1874 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1875 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1876 let mut language_indent_size = IndentSize::default();
1877 for old_edited_range in old_edited_ranges {
1878 let suggestions = request
1879 .before_edit
1880 .suggest_autoindents(old_edited_range.clone())
1881 .into_iter()
1882 .flatten();
1883 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1884 if let Some(suggestion) = suggestion {
1885 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1886
1887 // Find the indent size based on the language for this row.
1888 while let Some((row, size)) = language_indent_sizes.peek() {
1889 if *row > new_row {
1890 break;
1891 }
1892 language_indent_size = *size;
1893 language_indent_sizes.next();
1894 }
1895
1896 let suggested_indent = old_to_new_rows
1897 .get(&suggestion.basis_row)
1898 .and_then(|from_row| {
1899 Some(old_suggestions.get(from_row).copied()?.0)
1900 })
1901 .unwrap_or_else(|| {
1902 request
1903 .before_edit
1904 .indent_size_for_line(suggestion.basis_row)
1905 })
1906 .with_delta(suggestion.delta, language_indent_size);
1907 old_suggestions
1908 .insert(new_row, (suggested_indent, suggestion.within_error));
1909 }
1910 }
1911 yield_now().await;
1912 }
1913
1914 // Compute new suggestions for each line, but only include them in the result
1915 // if they differ from the old suggestion for that line.
1916 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1917 let mut language_indent_size = IndentSize::default();
1918 for (row_range, original_indent_column) in row_ranges {
1919 let new_edited_row_range = if request.is_block_mode {
1920 row_range.start..row_range.start + 1
1921 } else {
1922 row_range.clone()
1923 };
1924
1925 let suggestions = snapshot
1926 .suggest_autoindents(new_edited_row_range.clone())
1927 .into_iter()
1928 .flatten();
1929 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1930 if let Some(suggestion) = suggestion {
1931 // Find the indent size based on the language for this row.
1932 while let Some((row, size)) = language_indent_sizes.peek() {
1933 if *row > new_row {
1934 break;
1935 }
1936 language_indent_size = *size;
1937 language_indent_sizes.next();
1938 }
1939
1940 let suggested_indent = indent_sizes
1941 .get(&suggestion.basis_row)
1942 .copied()
1943 .map(|e| e.0)
1944 .unwrap_or_else(|| {
1945 snapshot.indent_size_for_line(suggestion.basis_row)
1946 })
1947 .with_delta(suggestion.delta, language_indent_size);
1948
1949 if old_suggestions.get(&new_row).is_none_or(
1950 |(old_indentation, was_within_error)| {
1951 suggested_indent != *old_indentation
1952 && (!suggestion.within_error || *was_within_error)
1953 },
1954 ) {
1955 indent_sizes.insert(
1956 new_row,
1957 (suggested_indent, request.ignore_empty_lines),
1958 );
1959 }
1960 }
1961 }
1962
1963 if let (true, Some(original_indent_column)) =
1964 (request.is_block_mode, original_indent_column)
1965 {
1966 let new_indent =
1967 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1968 *indent
1969 } else {
1970 snapshot.indent_size_for_line(row_range.start)
1971 };
1972 let delta = new_indent.len as i64 - original_indent_column as i64;
1973 if delta != 0 {
1974 for row in row_range.skip(1) {
1975 indent_sizes.entry(row).or_insert_with(|| {
1976 let mut size = snapshot.indent_size_for_line(row);
1977 if size.kind == new_indent.kind {
1978 match delta.cmp(&0) {
1979 Ordering::Greater => size.len += delta as u32,
1980 Ordering::Less => {
1981 size.len = size.len.saturating_sub(-delta as u32)
1982 }
1983 Ordering::Equal => {}
1984 }
1985 }
1986 (size, request.ignore_empty_lines)
1987 });
1988 }
1989 }
1990 }
1991
1992 yield_now().await;
1993 }
1994 }
1995
1996 indent_sizes
1997 .into_iter()
1998 .filter_map(|(row, (indent, ignore_empty_lines))| {
1999 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2000 None
2001 } else {
2002 Some((row, indent))
2003 }
2004 })
2005 .collect()
2006 })
2007 }
2008
2009 fn apply_autoindents(
2010 &mut self,
2011 indent_sizes: BTreeMap<u32, IndentSize>,
2012 cx: &mut Context<Self>,
2013 ) {
2014 self.autoindent_requests.clear();
2015 for tx in self.wait_for_autoindent_txs.drain(..) {
2016 tx.send(()).ok();
2017 }
2018
2019 let edits: Vec<_> = indent_sizes
2020 .into_iter()
2021 .filter_map(|(row, indent_size)| {
2022 let current_size = indent_size_for_line(self, row);
2023 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2024 })
2025 .collect();
2026
2027 let preserve_preview = self.preserve_preview();
2028 self.edit(edits, None, cx);
2029 if preserve_preview {
2030 self.refresh_preview();
2031 }
2032 }
2033
2034 /// Create a minimal edit that will cause the given row to be indented
2035 /// with the given size. After applying this edit, the length of the line
2036 /// will always be at least `new_size.len`.
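    ///
    /// For illustration, adjusting a row that currently has two spaces of
    /// indentation up to four spaces only inserts the two missing spaces at the
    /// start of the line (a sketch, not compiled here):
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```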
2037 pub fn edit_for_indent_size_adjustment(
2038 row: u32,
2039 current_size: IndentSize,
2040 new_size: IndentSize,
2041 ) -> Option<(Range<Point>, String)> {
2042 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2044 Ordering::Greater => {
2045 let point = Point::new(row, 0);
2046 Some((
2047 point..point,
2048 iter::repeat(new_size.char())
2049 .take((new_size.len - current_size.len) as usize)
2050 .collect::<String>(),
2051 ))
2052 }
2053
2054 Ordering::Less => Some((
2055 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2056 String::new(),
2057 )),
2058
2059 Ordering::Equal => None,
2060 }
2061 } else {
2062 Some((
2063 Point::new(row, 0)..Point::new(row, current_size.len),
2064 iter::repeat(new_size.char())
2065 .take(new_size.len as usize)
2066 .collect::<String>(),
2067 ))
2068 }
2069 }
2070
2071 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2072 /// and the given new text.
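    ///
    /// A sketch of the usual pattern of computing the diff in the background and
    /// then applying it (not compiled here; assumes this runs inside another
    /// `Buffer` method with a `cx: &mut Context<Self>` and a replacement string
    /// `new_text`):
    ///
    /// ```ignore
    /// let diff = self.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |this, cx| this.apply_diff(diff, cx)).ok();
    /// })
    /// .detach();
    /// ```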
2073 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2074 let old_text = self.as_rope().clone();
2075 let base_version = self.version();
2076 cx.background_executor()
2077 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2078 let old_text = old_text.to_string();
2079 let line_ending = LineEnding::detect(&new_text);
2080 LineEnding::normalize(&mut new_text);
2081 let edits = text_diff(&old_text, &new_text);
2082 Diff {
2083 base_version,
2084 line_ending,
2085 edits,
2086 }
2087 })
2088 }
2089
2090 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2092 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2093 let old_text = self.as_rope().clone();
2094 let line_ending = self.line_ending();
2095 let base_version = self.version();
2096 cx.background_spawn(async move {
2097 let ranges = trailing_whitespace_ranges(&old_text);
2098 let empty = Arc::<str>::from("");
2099 Diff {
2100 base_version,
2101 line_ending,
2102 edits: ranges
2103 .into_iter()
2104 .map(|range| (range, empty.clone()))
2105 .collect(),
2106 }
2107 })
2108 }
2109
2110 /// Ensures that the buffer ends with a single newline character, and
2111 /// no other whitespace. Skips if the buffer is empty.
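    ///
    /// A small illustration of the intended behavior (a sketch, not compiled
    /// here; assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n\n  ", cx);
    /// buffer.ensure_final_newline(cx);
    /// // The trailing blank line and spaces are replaced with a single newline.
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```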
2112 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2113 let len = self.len();
2114 if len == 0 {
2115 return;
2116 }
2117 let mut offset = len;
2118 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2119 let non_whitespace_len = chunk
2120 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2121 .len();
2122 offset -= chunk.len();
2123 offset += non_whitespace_len;
2124 if non_whitespace_len != 0 {
2125 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2126 return;
2127 }
2128 break;
2129 }
2130 }
2131 self.edit([(offset..len, "\n")], None, cx);
2132 }
2133
2134 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2135 /// calculated, then adjust the diff to account for those changes, and discard any
2136 /// parts of the diff that conflict with those changes.
2137 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2138 let snapshot = self.snapshot();
2139 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2140 let mut delta = 0;
2141 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2142 while let Some(edit_since) = edits_since.peek() {
2143 // If the edit occurs after a diff hunk, then it does not
2144 // affect that hunk.
2145 if edit_since.old.start > range.end {
2146 break;
2147 }
2148 // If the edit precedes the diff hunk, then adjust the hunk
2149 // to reflect the edit.
2150 else if edit_since.old.end < range.start {
2151 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2152 edits_since.next();
2153 }
2154 // If the edit intersects a diff hunk, then discard that hunk.
2155 else {
2156 return None;
2157 }
2158 }
2159
2160 let start = (range.start as i64 + delta) as usize;
2161 let end = (range.end as i64 + delta) as usize;
2162 Some((start..end, new_text))
2163 });
2164
2165 self.start_transaction();
2166 self.text.set_line_ending(diff.line_ending);
2167 self.edit(adjusted_edits, None, cx);
2168 self.end_transaction(cx)
2169 }
2170
2171 pub fn has_unsaved_edits(&self) -> bool {
2172 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2173
2174 if last_version == self.version {
2175 self.has_unsaved_edits
2176 .set((last_version, has_unsaved_edits));
2177 return has_unsaved_edits;
2178 }
2179
2180 let has_edits = self.has_edits_since(&self.saved_version);
2181 self.has_unsaved_edits
2182 .set((self.version.clone(), has_edits));
2183 has_edits
2184 }
2185
2186 /// Checks if the buffer has unsaved changes.
2187 pub fn is_dirty(&self) -> bool {
2188 if self.capability == Capability::ReadOnly {
2189 return false;
2190 }
2191 if self.has_conflict {
2192 return true;
2193 }
2194 match self.file.as_ref().map(|f| f.disk_state()) {
2195 Some(DiskState::New) | Some(DiskState::Deleted) => {
2196 !self.is_empty() && self.has_unsaved_edits()
2197 }
2198 _ => self.has_unsaved_edits(),
2199 }
2200 }
2201
2202 /// Marks the buffer as having a conflict regardless of current buffer state.
2203 pub fn set_conflict(&mut self) {
2204 self.has_conflict = true;
2205 }
2206
2207 /// Checks if the buffer and its file have both changed since the buffer
2208 /// was last saved or reloaded.
2209 pub fn has_conflict(&self) -> bool {
2210 if self.has_conflict {
2211 return true;
2212 }
2213 let Some(file) = self.file.as_ref() else {
2214 return false;
2215 };
2216 match file.disk_state() {
2217 DiskState::New => false,
2218 DiskState::Present { mtime } => match self.saved_mtime {
2219 Some(saved_mtime) => {
2220 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2221 }
2222 None => true,
2223 },
2224 DiskState::Deleted => false,
2225 }
2226 }
2227
2228 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2229 pub fn subscribe(&mut self) -> Subscription<usize> {
2230 self.text.subscribe()
2231 }
2232
2233 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2234 ///
2235 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
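    ///
    /// A small sketch of how a listener might use this (not compiled here; the
    /// `edited` flag is purely illustrative):
    ///
    /// ```ignore
    /// let edited = std::rc::Rc::new(std::cell::Cell::new(false));
    /// buffer.record_changes(std::rc::Rc::downgrade(&edited));
    /// // ...later, after edits may have occurred:
    /// if edited.take() {
    ///     // React to the buffer's text having changed.
    /// }
    /// ```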
2237 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2238 if let Err(ix) = self
2239 .change_bits
2240 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2241 {
2242 self.change_bits.insert(ix, bit);
2243 }
2244 }
2245
2246 /// Set the change bit for all "listeners".
2247 fn was_changed(&mut self) {
2248 self.change_bits.retain(|change_bit| {
2249 change_bit
2250 .upgrade()
2251 .inspect(|bit| {
2252 _ = bit.replace(true);
2253 })
2254 .is_some()
2255 });
2256 }
2257
2258 /// Starts a transaction, if one is not already in-progress. When undoing or
2259 /// redoing edits, all of the edits performed within a transaction are undone
2260 /// or redone together.
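    ///
    /// A sketch of grouping several edits into a single undoable step (not
    /// compiled here; assumes a `cx: &mut Context<Buffer>` and offsets that are
    /// valid for the buffer's contents):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "// footer\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```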
2261 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2262 self.start_transaction_at(Instant::now())
2263 }
2264
2265 /// Starts a transaction, providing the current time. Subsequent transactions
2266 /// that occur within a short period of time will be grouped together. This
2267 /// is controlled by the buffer's undo grouping duration.
2268 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2269 self.transaction_depth += 1;
2270 if self.was_dirty_before_starting_transaction.is_none() {
2271 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2272 }
2273 self.text.start_transaction_at(now)
2274 }
2275
2276 /// Terminates the current transaction, if this is the outermost transaction.
2277 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2278 self.end_transaction_at(Instant::now(), cx)
2279 }
2280
2281 /// Terminates the current transaction, providing the current time. Subsequent transactions
2282 /// that occur within a short period of time will be grouped together. This
2283 /// is controlled by the buffer's undo grouping duration.
2284 pub fn end_transaction_at(
2285 &mut self,
2286 now: Instant,
2287 cx: &mut Context<Self>,
2288 ) -> Option<TransactionId> {
2289 assert!(self.transaction_depth > 0);
2290 self.transaction_depth -= 1;
2291 let was_dirty = if self.transaction_depth == 0 {
2292 self.was_dirty_before_starting_transaction.take().unwrap()
2293 } else {
2294 false
2295 };
2296 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2297 self.did_edit(&start_version, was_dirty, cx);
2298 Some(transaction_id)
2299 } else {
2300 None
2301 }
2302 }
2303
2304 /// Manually add a transaction to the buffer's undo history.
2305 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2306 self.text.push_transaction(transaction, now);
2307 }
2308
2309 /// Differs from `push_transaction` in that it does not clear the redo
2310 /// stack. Intended to be used to create a parent transaction to merge
2311 /// potential child transactions into.
2312 ///
2313 /// The caller is responsible for removing it from the undo history using
2314 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2315 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to do so is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and then merge the resulting transactions into the transaction
    /// created by this method.
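    ///
    /// A sketch of the intended pattern (not compiled here; assumes a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // The child was created with the usual start/end methods, so the
    ///     // redo stack has been cleared; fold it into the parent.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged in, so remove the placeholder transaction.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```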
2320 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2321 self.text.push_empty_transaction(now)
2322 }
2323
2324 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2326 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2327 self.text.finalize_last_transaction()
2328 }
2329
2330 /// Manually group all changes since a given transaction.
2331 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2332 self.text.group_until_transaction(transaction_id);
2333 }
2334
    /// Manually remove a transaction from the buffer's undo history.
2336 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2337 self.text.forget_transaction(transaction_id)
2338 }
2339
    /// Retrieve a transaction from the buffer's undo history.
2341 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2342 self.text.get_transaction(transaction_id)
2343 }
2344
2345 /// Manually merge two transactions in the buffer's undo history.
2346 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2347 self.text.merge_transactions(transaction, destination);
2348 }
2349
2350 /// Waits for the buffer to receive operations with the given timestamps.
2351 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2352 &mut self,
2353 edit_ids: It,
2354 ) -> impl Future<Output = Result<()>> + use<It> {
2355 self.text.wait_for_edits(edit_ids)
2356 }
2357
2358 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2359 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2360 &mut self,
2361 anchors: It,
2362 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2363 self.text.wait_for_anchors(anchors)
2364 }
2365
2366 /// Waits for the buffer to receive operations up to the given version.
2367 pub fn wait_for_version(
2368 &mut self,
2369 version: clock::Global,
2370 ) -> impl Future<Output = Result<()>> + use<> {
2371 self.text.wait_for_version(version)
2372 }
2373
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2376 pub fn give_up_waiting(&mut self) {
2377 self.text.give_up_waiting();
2378 }
2379
2380 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2381 let mut rx = None;
2382 if !self.autoindent_requests.is_empty() {
2383 let channel = oneshot::channel();
2384 self.wait_for_autoindent_txs.push(channel.0);
2385 rx = Some(channel.1);
2386 }
2387 rx
2388 }
2389
2390 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2391 pub fn set_active_selections(
2392 &mut self,
2393 selections: Arc<[Selection<Anchor>]>,
2394 line_mode: bool,
2395 cursor_shape: CursorShape,
2396 cx: &mut Context<Self>,
2397 ) {
2398 let lamport_timestamp = self.text.lamport_clock.tick();
2399 self.remote_selections.insert(
2400 self.text.replica_id(),
2401 SelectionSet {
2402 selections: selections.clone(),
2403 lamport_timestamp,
2404 line_mode,
2405 cursor_shape,
2406 },
2407 );
2408 self.send_operation(
2409 Operation::UpdateSelections {
2410 selections,
2411 line_mode,
2412 lamport_timestamp,
2413 cursor_shape,
2414 },
2415 true,
2416 cx,
2417 );
2418 self.non_text_state_update_count += 1;
2419 cx.notify();
2420 }
2421
2422 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2423 /// this replica.
2424 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2425 if self
2426 .remote_selections
2427 .get(&self.text.replica_id())
2428 .is_none_or(|set| !set.selections.is_empty())
2429 {
2430 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2431 }
2432 }
2433
2434 pub fn set_agent_selections(
2435 &mut self,
2436 selections: Arc<[Selection<Anchor>]>,
2437 line_mode: bool,
2438 cursor_shape: CursorShape,
2439 cx: &mut Context<Self>,
2440 ) {
2441 let lamport_timestamp = self.text.lamport_clock.tick();
2442 self.remote_selections.insert(
2443 ReplicaId::AGENT,
2444 SelectionSet {
2445 selections,
2446 lamport_timestamp,
2447 line_mode,
2448 cursor_shape,
2449 },
2450 );
2451 self.non_text_state_update_count += 1;
2452 cx.notify();
2453 }
2454
2455 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2456 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2457 }
2458
2459 /// Replaces the buffer's entire text.
2460 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2461 where
2462 T: Into<Arc<str>>,
2463 {
2464 self.autoindent_requests.clear();
2465 self.edit([(0..self.len(), text)], None, cx)
2466 }
2467
2468 /// Appends the given text to the end of the buffer.
2469 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2470 where
2471 T: Into<Arc<str>>,
2472 {
2473 self.edit([(self.len()..self.len(), text)], None, cx)
2474 }
2475
2476 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2477 /// delete, and a string of text to insert at that location.
2478 ///
2479 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2480 /// request for the edited ranges, which will be processed when the buffer finishes
2481 /// parsing.
2482 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
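    ///
    /// A short sketch of a multi-range edit with auto-indentation (not compiled
    /// here; the byte offsets are illustrative and assumed to be valid for the
    /// buffer's current contents):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         (10..10, "let x = 1;\n"),     // insert at offset 10
    ///         (42..50, "replacement text"), // replace offsets 42..50
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```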
2485 pub fn edit<I, S, T>(
2486 &mut self,
2487 edits_iter: I,
2488 autoindent_mode: Option<AutoindentMode>,
2489 cx: &mut Context<Self>,
2490 ) -> Option<clock::Lamport>
2491 where
2492 I: IntoIterator<Item = (Range<S>, T)>,
2493 S: ToOffset,
2494 T: Into<Arc<str>>,
2495 {
2496 // Skip invalid edits and coalesce contiguous ones.
2497 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2498
2499 for (range, new_text) in edits_iter {
2500 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2501
2502 if range.start > range.end {
2503 mem::swap(&mut range.start, &mut range.end);
2504 }
2505 let new_text = new_text.into();
2506 if !new_text.is_empty() || !range.is_empty() {
2507 if let Some((prev_range, prev_text)) = edits.last_mut()
2508 && prev_range.end >= range.start
2509 {
2510 prev_range.end = cmp::max(prev_range.end, range.end);
2511 *prev_text = format!("{prev_text}{new_text}").into();
2512 } else {
2513 edits.push((range, new_text));
2514 }
2515 }
2516 }
2517 if edits.is_empty() {
2518 return None;
2519 }
2520
2521 self.start_transaction();
2522 self.pending_autoindent.take();
2523 let autoindent_request = autoindent_mode
2524 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2525
2526 let edit_operation = self.text.edit(edits.iter().cloned());
2527 let edit_id = edit_operation.timestamp();
2528
2529 if let Some((before_edit, mode)) = autoindent_request {
2530 let mut delta = 0isize;
2531 let mut previous_setting = None;
2532 let entries: Vec<_> = edits
2533 .into_iter()
2534 .enumerate()
2535 .zip(&edit_operation.as_edit().unwrap().new_text)
2536 .filter(|((_, (range, _)), _)| {
2537 let language = before_edit.language_at(range.start);
2538 let language_id = language.map(|l| l.id());
2539 if let Some((cached_language_id, auto_indent)) = previous_setting
2540 && cached_language_id == language_id
2541 {
2542 auto_indent
2543 } else {
2544 // The auto-indent setting is not present in editorconfigs, hence
2545 // we can avoid passing the file here.
2546 let auto_indent =
2547 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2548 previous_setting = Some((language_id, auto_indent));
2549 auto_indent
2550 }
2551 })
2552 .map(|((ix, (range, _)), new_text)| {
2553 let new_text_length = new_text.len();
2554 let old_start = range.start.to_point(&before_edit);
2555 let new_start = (delta + range.start as isize) as usize;
2556 let range_len = range.end - range.start;
2557 delta += new_text_length as isize - range_len as isize;
2558
2559 // Decide what range of the insertion to auto-indent, and whether
2560 // the first line of the insertion should be considered a newly-inserted line
2561 // or an edit to an existing line.
2562 let mut range_of_insertion_to_indent = 0..new_text_length;
2563 let mut first_line_is_new = true;
2564
2565 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2566 let old_line_end = before_edit.line_len(old_start.row);
2567
2568 if old_start.column > old_line_start {
2569 first_line_is_new = false;
2570 }
2571
2572 if !new_text.contains('\n')
2573 && (old_start.column + (range_len as u32) < old_line_end
2574 || old_line_end == old_line_start)
2575 {
2576 first_line_is_new = false;
2577 }
2578
2579 // When inserting text starting with a newline, avoid auto-indenting the
2580 // previous line.
2581 if new_text.starts_with('\n') {
2582 range_of_insertion_to_indent.start += 1;
2583 first_line_is_new = true;
2584 }
2585
2586 let mut original_indent_column = None;
2587 if let AutoindentMode::Block {
2588 original_indent_columns,
2589 } = &mode
2590 {
2591 original_indent_column = Some(if new_text.starts_with('\n') {
2592 indent_size_for_text(
2593 new_text[range_of_insertion_to_indent.clone()].chars(),
2594 )
2595 .len
2596 } else {
2597 original_indent_columns
2598 .get(ix)
2599 .copied()
2600 .flatten()
2601 .unwrap_or_else(|| {
2602 indent_size_for_text(
2603 new_text[range_of_insertion_to_indent.clone()].chars(),
2604 )
2605 .len
2606 })
2607 });
2608
2609 // Avoid auto-indenting the line after the edit.
2610 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2611 range_of_insertion_to_indent.end -= 1;
2612 }
2613 }
2614
2615 AutoindentRequestEntry {
2616 first_line_is_new,
2617 original_indent_column,
2618 indent_size: before_edit.language_indent_size_at(range.start, cx),
2619 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2620 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2621 }
2622 })
2623 .collect();
2624
2625 if !entries.is_empty() {
2626 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2627 before_edit,
2628 entries,
2629 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2630 ignore_empty_lines: false,
2631 }));
2632 }
2633 }
2634
2635 self.end_transaction(cx);
2636 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2637 Some(edit_id)
2638 }
2639
2640 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2641 self.was_changed();
2642
2643 if self.edits_since::<usize>(old_version).next().is_none() {
2644 return;
2645 }
2646
2647 self.reparse(cx, true);
2648 cx.emit(BufferEvent::Edited);
2649 if was_dirty != self.is_dirty() {
2650 cx.emit(BufferEvent::DirtyChanged);
2651 }
2652 cx.notify();
2653 }
2654
2655 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2656 where
2657 I: IntoIterator<Item = Range<T>>,
2658 T: ToOffset + Copy,
2659 {
2660 let before_edit = self.snapshot();
2661 let entries = ranges
2662 .into_iter()
2663 .map(|range| AutoindentRequestEntry {
2664 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2665 first_line_is_new: true,
2666 indent_size: before_edit.language_indent_size_at(range.start, cx),
2667 original_indent_column: None,
2668 })
2669 .collect();
2670 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2671 before_edit,
2672 entries,
2673 is_block_mode: false,
2674 ignore_empty_lines: true,
2675 }));
2676 self.request_autoindent(cx);
2677 }
2678
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2681 pub fn insert_empty_line(
2682 &mut self,
2683 position: impl ToPoint,
2684 space_above: bool,
2685 space_below: bool,
2686 cx: &mut Context<Self>,
2687 ) -> Point {
2688 let mut position = position.to_point(self);
2689
2690 self.start_transaction();
2691
2692 self.edit(
2693 [(position..position, "\n")],
2694 Some(AutoindentMode::EachLine),
2695 cx,
2696 );
2697
2698 if position.column > 0 {
2699 position += Point::new(1, 0);
2700 }
2701
2702 if !self.is_line_blank(position.row) {
2703 self.edit(
2704 [(position..position, "\n")],
2705 Some(AutoindentMode::EachLine),
2706 cx,
2707 );
2708 }
2709
2710 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2711 self.edit(
2712 [(position..position, "\n")],
2713 Some(AutoindentMode::EachLine),
2714 cx,
2715 );
2716 position.row += 1;
2717 }
2718
2719 if space_below
2720 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2721 {
2722 self.edit(
2723 [(position..position, "\n")],
2724 Some(AutoindentMode::EachLine),
2725 cx,
2726 );
2727 }
2728
2729 self.end_transaction(cx);
2730
2731 position
2732 }
2733
2734 /// Applies the given remote operations to the buffer.
2735 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2736 self.pending_autoindent.take();
2737 let was_dirty = self.is_dirty();
2738 let old_version = self.version.clone();
2739 let mut deferred_ops = Vec::new();
2740 let buffer_ops = ops
2741 .into_iter()
2742 .filter_map(|op| match op {
2743 Operation::Buffer(op) => Some(op),
2744 _ => {
2745 if self.can_apply_op(&op) {
2746 self.apply_op(op, cx);
2747 } else {
2748 deferred_ops.push(op);
2749 }
2750 None
2751 }
2752 })
2753 .collect::<Vec<_>>();
2754 for operation in buffer_ops.iter() {
2755 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2756 }
2757 self.text.apply_ops(buffer_ops);
2758 self.deferred_ops.insert(deferred_ops);
2759 self.flush_deferred_ops(cx);
2760 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, since the operations could include a
        // selection update.
2763 cx.notify();
2764 }
2765
2766 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2767 let mut deferred_ops = Vec::new();
2768 for op in self.deferred_ops.drain().iter().cloned() {
2769 if self.can_apply_op(&op) {
2770 self.apply_op(op, cx);
2771 } else {
2772 deferred_ops.push(op);
2773 }
2774 }
2775 self.deferred_ops.insert(deferred_ops);
2776 }
2777
2778 pub fn has_deferred_ops(&self) -> bool {
2779 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2780 }
2781
2782 fn can_apply_op(&self, operation: &Operation) -> bool {
2783 match operation {
2784 Operation::Buffer(_) => {
2785 unreachable!("buffer operations should never be applied at this layer")
2786 }
2787 Operation::UpdateDiagnostics {
2788 diagnostics: diagnostic_set,
2789 ..
2790 } => diagnostic_set.iter().all(|diagnostic| {
2791 self.text.can_resolve(&diagnostic.range.start)
2792 && self.text.can_resolve(&diagnostic.range.end)
2793 }),
2794 Operation::UpdateSelections { selections, .. } => selections
2795 .iter()
2796 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2797 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2798 }
2799 }
2800
2801 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2802 match operation {
2803 Operation::Buffer(_) => {
2804 unreachable!("buffer operations should never be applied at this layer")
2805 }
2806 Operation::UpdateDiagnostics {
2807 server_id,
2808 diagnostics: diagnostic_set,
2809 lamport_timestamp,
2810 } => {
2811 let snapshot = self.snapshot();
2812 self.apply_diagnostic_update(
2813 server_id,
2814 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2815 lamport_timestamp,
2816 cx,
2817 );
2818 }
2819 Operation::UpdateSelections {
2820 selections,
2821 lamport_timestamp,
2822 line_mode,
2823 cursor_shape,
2824 } => {
2825 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2826 && set.lamport_timestamp > lamport_timestamp
2827 {
2828 return;
2829 }
2830
2831 self.remote_selections.insert(
2832 lamport_timestamp.replica_id,
2833 SelectionSet {
2834 selections,
2835 lamport_timestamp,
2836 line_mode,
2837 cursor_shape,
2838 },
2839 );
2840 self.text.lamport_clock.observe(lamport_timestamp);
2841 self.non_text_state_update_count += 1;
2842 }
2843 Operation::UpdateCompletionTriggers {
2844 triggers,
2845 lamport_timestamp,
2846 server_id,
2847 } => {
2848 if triggers.is_empty() {
2849 self.completion_triggers_per_language_server
2850 .remove(&server_id);
2851 self.completion_triggers = self
2852 .completion_triggers_per_language_server
2853 .values()
2854 .flat_map(|triggers| triggers.iter().cloned())
2855 .collect();
2856 } else {
2857 self.completion_triggers_per_language_server
2858 .insert(server_id, triggers.iter().cloned().collect());
2859 self.completion_triggers.extend(triggers);
2860 }
2861 self.text.lamport_clock.observe(lamport_timestamp);
2862 }
2863 Operation::UpdateLineEnding {
2864 line_ending,
2865 lamport_timestamp,
2866 } => {
2867 self.text.set_line_ending(line_ending);
2868 self.text.lamport_clock.observe(lamport_timestamp);
2869 }
2870 }
2871 }
2872
2873 fn apply_diagnostic_update(
2874 &mut self,
2875 server_id: LanguageServerId,
2876 diagnostics: DiagnosticSet,
2877 lamport_timestamp: clock::Lamport,
2878 cx: &mut Context<Self>,
2879 ) {
2880 if lamport_timestamp > self.diagnostics_timestamp {
2881 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2882 if diagnostics.is_empty() {
2883 if let Ok(ix) = ix {
2884 self.diagnostics.remove(ix);
2885 }
2886 } else {
2887 match ix {
2888 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2889 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2890 };
2891 }
2892 self.diagnostics_timestamp = lamport_timestamp;
2893 self.non_text_state_update_count += 1;
2894 self.text.lamport_clock.observe(lamport_timestamp);
2895 cx.notify();
2896 cx.emit(BufferEvent::DiagnosticsUpdated);
2897 }
2898 }
2899
2900 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2901 self.was_changed();
2902 cx.emit(BufferEvent::Operation {
2903 operation,
2904 is_local,
2905 });
2906 }
2907
2908 /// Removes the selections for a given peer.
2909 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2910 self.remote_selections.remove(&replica_id);
2911 cx.notify();
2912 }
2913
2914 /// Undoes the most recent transaction.
2915 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2916 let was_dirty = self.is_dirty();
2917 let old_version = self.version.clone();
2918
2919 if let Some((transaction_id, operation)) = self.text.undo() {
2920 self.send_operation(Operation::Buffer(operation), true, cx);
2921 self.did_edit(&old_version, was_dirty, cx);
2922 Some(transaction_id)
2923 } else {
2924 None
2925 }
2926 }
2927
2928 /// Manually undoes a specific transaction in the buffer's undo history.
2929 pub fn undo_transaction(
2930 &mut self,
2931 transaction_id: TransactionId,
2932 cx: &mut Context<Self>,
2933 ) -> bool {
2934 let was_dirty = self.is_dirty();
2935 let old_version = self.version.clone();
2936 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2937 self.send_operation(Operation::Buffer(operation), true, cx);
2938 self.did_edit(&old_version, was_dirty, cx);
2939 true
2940 } else {
2941 false
2942 }
2943 }
2944
2945 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2946 pub fn undo_to_transaction(
2947 &mut self,
2948 transaction_id: TransactionId,
2949 cx: &mut Context<Self>,
2950 ) -> bool {
2951 let was_dirty = self.is_dirty();
2952 let old_version = self.version.clone();
2953
2954 let operations = self.text.undo_to_transaction(transaction_id);
2955 let undone = !operations.is_empty();
2956 for operation in operations {
2957 self.send_operation(Operation::Buffer(operation), true, cx);
2958 }
2959 if undone {
2960 self.did_edit(&old_version, was_dirty, cx)
2961 }
2962 undone
2963 }
2964
2965 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2966 let was_dirty = self.is_dirty();
2967 let operation = self.text.undo_operations(counts);
2968 let old_version = self.version.clone();
2969 self.send_operation(Operation::Buffer(operation), true, cx);
2970 self.did_edit(&old_version, was_dirty, cx);
2971 }
2972
    /// Redoes the most recently undone transaction.
2974 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2975 let was_dirty = self.is_dirty();
2976 let old_version = self.version.clone();
2977
2978 if let Some((transaction_id, operation)) = self.text.redo() {
2979 self.send_operation(Operation::Buffer(operation), true, cx);
2980 self.did_edit(&old_version, was_dirty, cx);
2981 Some(transaction_id)
2982 } else {
2983 None
2984 }
2985 }
2986
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2988 pub fn redo_to_transaction(
2989 &mut self,
2990 transaction_id: TransactionId,
2991 cx: &mut Context<Self>,
2992 ) -> bool {
2993 let was_dirty = self.is_dirty();
2994 let old_version = self.version.clone();
2995
2996 let operations = self.text.redo_to_transaction(transaction_id);
2997 let redone = !operations.is_empty();
2998 for operation in operations {
2999 self.send_operation(Operation::Buffer(operation), true, cx);
3000 }
3001 if redone {
3002 self.did_edit(&old_version, was_dirty, cx)
3003 }
3004 redone
3005 }
3006
3007 /// Override current completion triggers with the user-provided completion triggers.
3008 pub fn set_completion_triggers(
3009 &mut self,
3010 server_id: LanguageServerId,
3011 triggers: BTreeSet<String>,
3012 cx: &mut Context<Self>,
3013 ) {
3014 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3015 if triggers.is_empty() {
3016 self.completion_triggers_per_language_server
3017 .remove(&server_id);
3018 self.completion_triggers = self
3019 .completion_triggers_per_language_server
3020 .values()
3021 .flat_map(|triggers| triggers.iter().cloned())
3022 .collect();
3023 } else {
3024 self.completion_triggers_per_language_server
3025 .insert(server_id, triggers.clone());
3026 self.completion_triggers.extend(triggers.iter().cloned());
3027 }
3028 self.send_operation(
3029 Operation::UpdateCompletionTriggers {
3030 triggers: triggers.into_iter().collect(),
3031 lamport_timestamp: self.completion_triggers_timestamp,
3032 server_id,
3033 },
3034 true,
3035 cx,
3036 );
3037 cx.notify();
3038 }
3039
3040 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3042 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3043 &self.completion_triggers
3044 }
3045
3046 /// Call this directly after performing edits to prevent the preview tab
3047 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3048 /// to return false until there are additional edits.
3049 pub fn refresh_preview(&mut self) {
3050 self.preview_version = self.version.clone();
3051 }
3052
3053 /// Whether we should preserve the preview status of a tab containing this buffer.
3054 pub fn preserve_preview(&self) -> bool {
3055 !self.has_edits_since(&self.preview_version)
3056 }
3057}
3058
3059#[doc(hidden)]
3060#[cfg(any(test, feature = "test-support"))]
3061impl Buffer {
3062 pub fn edit_via_marked_text(
3063 &mut self,
3064 marked_string: &str,
3065 autoindent_mode: Option<AutoindentMode>,
3066 cx: &mut Context<Self>,
3067 ) {
3068 let edits = self.edits_for_marked_text(marked_string);
3069 self.edit(edits, autoindent_mode, cx);
3070 }
3071
3072 pub fn set_group_interval(&mut self, group_interval: Duration) {
3073 self.text.set_group_interval(group_interval);
3074 }
3075
3076 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3077 where
3078 T: rand::Rng,
3079 {
3080 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3081 let mut last_end = None;
3082 for _ in 0..old_range_count {
3083 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3084 break;
3085 }
3086
3087 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3088 let mut range = self.random_byte_range(new_start, rng);
3089 if rng.random_bool(0.2) {
3090 mem::swap(&mut range.start, &mut range.end);
3091 }
3092 last_end = Some(range.end);
3093
3094 let new_text_len = rng.random_range(0..10);
3095 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3096 new_text = new_text.to_uppercase();
3097
3098 edits.push((range, new_text));
3099 }
3100 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3101 self.edit(edits, None, cx);
3102 }
3103
3104 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3105 let was_dirty = self.is_dirty();
3106 let old_version = self.version.clone();
3107
3108 let ops = self.text.randomly_undo_redo(rng);
3109 if !ops.is_empty() {
3110 for op in ops {
3111 self.send_operation(Operation::Buffer(op), true, cx);
3112 self.did_edit(&old_version, was_dirty, cx);
3113 }
3114 }
3115 }
3116}
3117
3118impl EventEmitter<BufferEvent> for Buffer {}
3119
3120impl Deref for Buffer {
3121 type Target = TextBuffer;
3122
3123 fn deref(&self) -> &Self::Target {
3124 &self.text
3125 }
3126}
3127
3128impl BufferSnapshot {
3129 /// Returns [`IndentSize`] for a given line that respects user settings and
3130 /// language preferences.
3131 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3132 indent_size_for_line(self, row)
3133 }
3134
3135 /// Returns [`IndentSize`] for a given position that respects user settings
3136 /// and language preferences.
3137 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3138 let settings = language_settings(
3139 self.language_at(position).map(|l| l.name()),
3140 self.file(),
3141 cx,
3142 );
3143 if settings.hard_tabs {
3144 IndentSize::tab()
3145 } else {
3146 IndentSize::spaces(settings.tab_size.get())
3147 }
3148 }
3149
3150 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3151 /// is passed in as `single_indent_size`.
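    ///
    /// For illustration (a sketch, not compiled here; assumes `snapshot` is a
    /// [`BufferSnapshot`] whose language provides indentation queries):
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```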
3152 pub fn suggested_indents(
3153 &self,
3154 rows: impl Iterator<Item = u32>,
3155 single_indent_size: IndentSize,
3156 ) -> BTreeMap<u32, IndentSize> {
3157 let mut result = BTreeMap::new();
3158
3159 for row_range in contiguous_ranges(rows, 10) {
3160 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3161 Some(suggestions) => suggestions,
3162 _ => break,
3163 };
3164
3165 for (row, suggestion) in row_range.zip(suggestions) {
3166 let indent_size = if let Some(suggestion) = suggestion {
3167 result
3168 .get(&suggestion.basis_row)
3169 .copied()
3170 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3171 .with_delta(suggestion.delta, single_indent_size)
3172 } else {
3173 self.indent_size_for_line(row)
3174 };
3175
3176 result.insert(row, indent_size);
3177 }
3178 }
3179
3180 result
3181 }
3182
3183 fn suggest_autoindents(
3184 &self,
3185 row_range: Range<u32>,
3186 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3187 let config = &self.language.as_ref()?.config;
3188 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3189
3190 #[derive(Debug, Clone)]
3191 struct StartPosition {
3192 start: Point,
3193 suffix: SharedString,
3194 }
3195
3196 // Find the suggested indentation ranges based on the syntax tree.
3197 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3198 let end = Point::new(row_range.end, 0);
3199 let range = (start..end).to_offset(&self.text);
3200 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3201 Some(&grammar.indents_config.as_ref()?.query)
3202 });
3203 let indent_configs = matches
3204 .grammars()
3205 .iter()
3206 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3207 .collect::<Vec<_>>();
3208
3209 let mut indent_ranges = Vec::<Range<Point>>::new();
3210 let mut start_positions = Vec::<StartPosition>::new();
3211 let mut outdent_positions = Vec::<Point>::new();
3212 while let Some(mat) = matches.peek() {
3213 let mut start: Option<Point> = None;
3214 let mut end: Option<Point> = None;
3215
3216 let config = indent_configs[mat.grammar_index];
3217 for capture in mat.captures {
3218 if capture.index == config.indent_capture_ix {
3219 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3220 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3221 } else if Some(capture.index) == config.start_capture_ix {
3222 start = Some(Point::from_ts_point(capture.node.end_position()));
3223 } else if Some(capture.index) == config.end_capture_ix {
3224 end = Some(Point::from_ts_point(capture.node.start_position()));
3225 } else if Some(capture.index) == config.outdent_capture_ix {
3226 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3227 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3228 start_positions.push(StartPosition {
3229 start: Point::from_ts_point(capture.node.start_position()),
3230 suffix: suffix.clone(),
3231 });
3232 }
3233 }
3234
3235 matches.advance();
3236 if let Some((start, end)) = start.zip(end) {
3237 if start.row == end.row {
3238 continue;
3239 }
3240 let range = start..end;
3241 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3242 Err(ix) => indent_ranges.insert(ix, range),
3243 Ok(ix) => {
3244 let prev_range = &mut indent_ranges[ix];
3245 prev_range.end = prev_range.end.max(range.end);
3246 }
3247 }
3248 }
3249 }
3250
3251 let mut error_ranges = Vec::<Range<Point>>::new();
3252 let mut matches = self
3253 .syntax
3254 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3255 while let Some(mat) = matches.peek() {
3256 let node = mat.captures[0].node;
3257 let start = Point::from_ts_point(node.start_position());
3258 let end = Point::from_ts_point(node.end_position());
3259 let range = start..end;
3260 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3261 Ok(ix) | Err(ix) => ix,
3262 };
3263 let mut end_ix = ix;
3264 while let Some(existing_range) = error_ranges.get(end_ix) {
3265 if existing_range.end < end {
3266 end_ix += 1;
3267 } else {
3268 break;
3269 }
3270 }
3271 error_ranges.splice(ix..end_ix, [range]);
3272 matches.advance();
3273 }
3274
3275 outdent_positions.sort();
3276 for outdent_position in outdent_positions {
3277 // find the innermost indent range containing this outdent_position
3278 // set its end to the outdent position
3279 if let Some(range_to_truncate) = indent_ranges
3280 .iter_mut()
3281 .filter(|indent_range| indent_range.contains(&outdent_position))
3282 .next_back()
3283 {
3284 range_to_truncate.end = outdent_position;
3285 }
3286 }
3287
3288 start_positions.sort_by_key(|b| b.start);
3289
        // Find the suggested indentation increases and decreases based on regexes.
3291 let mut regex_outdent_map = HashMap::default();
3292 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3293 let mut start_positions_iter = start_positions.iter().peekable();
3294
3295 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3296 self.for_each_line(
3297 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3298 ..Point::new(row_range.end, 0),
3299 |row, line| {
3300 if config
3301 .decrease_indent_pattern
3302 .as_ref()
3303 .is_some_and(|regex| regex.is_match(line))
3304 {
3305 indent_change_rows.push((row, Ordering::Less));
3306 }
3307 if config
3308 .increase_indent_pattern
3309 .as_ref()
3310 .is_some_and(|regex| regex.is_match(line))
3311 {
3312 indent_change_rows.push((row + 1, Ordering::Greater));
3313 }
3314 while let Some(pos) = start_positions_iter.peek() {
3315 if pos.start.row < row {
3316 let pos = start_positions_iter.next().unwrap();
3317 last_seen_suffix
3318 .entry(pos.suffix.to_string())
3319 .or_default()
3320 .push(pos.start);
3321 } else {
3322 break;
3323 }
3324 }
3325 for rule in &config.decrease_indent_patterns {
3326 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3327 let row_start_column = self.indent_size_for_line(row).len;
3328 let basis_row = rule
3329 .valid_after
3330 .iter()
3331 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3332 .flatten()
3333 .filter(|start_point| start_point.column <= row_start_column)
3334 .max_by_key(|start_point| start_point.row);
3335 if let Some(outdent_to_row) = basis_row {
3336 regex_outdent_map.insert(row, outdent_to_row.row);
3337 }
3338 break;
3339 }
3340 }
3341 },
3342 );
3343
3344 let mut indent_changes = indent_change_rows.into_iter().peekable();
3345 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3346 prev_non_blank_row.unwrap_or(0)
3347 } else {
3348 row_range.start.saturating_sub(1)
3349 };
3350
3351 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3352 Some(row_range.map(move |row| {
3353 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3354
3355 let mut indent_from_prev_row = false;
3356 let mut outdent_from_prev_row = false;
3357 let mut outdent_to_row = u32::MAX;
3358 let mut from_regex = false;
3359
3360 while let Some((indent_row, delta)) = indent_changes.peek() {
3361 match indent_row.cmp(&row) {
3362 Ordering::Equal => match delta {
3363 Ordering::Less => {
3364 from_regex = true;
3365 outdent_from_prev_row = true
3366 }
3367 Ordering::Greater => {
3368 indent_from_prev_row = true;
3369 from_regex = true
3370 }
3371 _ => {}
3372 },
3373
3374 Ordering::Greater => break,
3375 Ordering::Less => {}
3376 }
3377
3378 indent_changes.next();
3379 }
3380
3381 for range in &indent_ranges {
3382 if range.start.row >= row {
3383 break;
3384 }
3385 if range.start.row == prev_row && range.end > row_start {
3386 indent_from_prev_row = true;
3387 }
3388 if range.end > prev_row_start && range.end <= row_start {
3389 outdent_to_row = outdent_to_row.min(range.start.row);
3390 }
3391 }
3392
3393 if let Some(basis_row) = regex_outdent_map.get(&row) {
3394 indent_from_prev_row = false;
3395 outdent_to_row = *basis_row;
3396 from_regex = true;
3397 }
3398
3399 let within_error = error_ranges
3400 .iter()
3401 .any(|e| e.start.row < row && e.end > row_start);
3402
3403 let suggestion = if outdent_to_row == prev_row
3404 || (outdent_from_prev_row && indent_from_prev_row)
3405 {
3406 Some(IndentSuggestion {
3407 basis_row: prev_row,
3408 delta: Ordering::Equal,
3409 within_error: within_error && !from_regex,
3410 })
3411 } else if indent_from_prev_row {
3412 Some(IndentSuggestion {
3413 basis_row: prev_row,
3414 delta: Ordering::Greater,
3415 within_error: within_error && !from_regex,
3416 })
3417 } else if outdent_to_row < prev_row {
3418 Some(IndentSuggestion {
3419 basis_row: outdent_to_row,
3420 delta: Ordering::Equal,
3421 within_error: within_error && !from_regex,
3422 })
3423 } else if outdent_from_prev_row {
3424 Some(IndentSuggestion {
3425 basis_row: prev_row,
3426 delta: Ordering::Less,
3427 within_error: within_error && !from_regex,
3428 })
3429 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3430 {
3431 Some(IndentSuggestion {
3432 basis_row: prev_row,
3433 delta: Ordering::Equal,
3434 within_error: within_error && !from_regex,
3435 })
3436 } else {
3437 None
3438 };
3439
3440 prev_row = row;
3441 prev_row_start = row_start;
3442 suggestion
3443 }))
3444 }
3445
3446 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3447 while row > 0 {
3448 row -= 1;
3449 if !self.is_line_blank(row) {
3450 return Some(row);
3451 }
3452 }
3453 None
3454 }
3455
3456 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3457 let captures = self.syntax.captures(range, &self.text, |grammar| {
3458 grammar
3459 .highlights_config
3460 .as_ref()
3461 .map(|config| &config.query)
3462 });
3463 let highlight_maps = captures
3464 .grammars()
3465 .iter()
3466 .map(|grammar| grammar.highlight_map())
3467 .collect();
3468 (captures, highlight_maps)
3469 }
3470
3471 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3472 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3473 /// returned in chunks where each chunk has a single syntax highlighting style and
3474 /// diagnostic status.
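    ///
    /// A sketch of reassembling the plain text for a range (not compiled here;
    /// assumes `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), false) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```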
3475 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3476 let range = range.start.to_offset(self)..range.end.to_offset(self);
3477
3478 let mut syntax = None;
3479 if language_aware {
3480 syntax = Some(self.get_highlights(range.clone()));
3481 }
3482 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3483 let diagnostics = language_aware;
3484 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3485 }
3486
3487 pub fn highlighted_text_for_range<T: ToOffset>(
3488 &self,
3489 range: Range<T>,
3490 override_style: Option<HighlightStyle>,
3491 syntax_theme: &SyntaxTheme,
3492 ) -> HighlightedText {
3493 HighlightedText::from_buffer_range(
3494 range,
3495 &self.text,
3496 &self.syntax,
3497 override_style,
3498 syntax_theme,
3499 )
3500 }
3501
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used to avoid allocating a new string for each line.
3504 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3505 let mut line = String::new();
3506 let mut row = range.start.row;
3507 for chunk in self
3508 .as_rope()
3509 .chunks_in_range(range.to_offset(self))
3510 .chain(["\n"])
3511 {
3512 for (newline_ix, text) in chunk.split('\n').enumerate() {
3513 if newline_ix > 0 {
3514 callback(row, &line);
3515 row += 1;
3516 line.clear();
3517 }
3518 line.push_str(text);
3519 }
3520 }
3521 }
3522
3523 /// Iterates over every [`SyntaxLayer`] in the buffer.
3524 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3525 self.syntax_layers_for_range(0..self.len(), true)
3526 }
3527
3528 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3529 let offset = position.to_offset(self);
3530 self.syntax_layers_for_range(offset..offset, false)
3531 .filter(|l| {
3532 if let Some(ranges) = l.included_sub_ranges {
3533 ranges.iter().any(|range| {
3534 let start = range.start.to_offset(self);
3535 start <= offset && {
3536 let end = range.end.to_offset(self);
3537 offset < end
3538 }
3539 })
3540 } else {
3541 l.node().start_byte() <= offset && l.node().end_byte() > offset
3542 }
3543 })
3544 .last()
3545 }
3546
3547 pub fn syntax_layers_for_range<D: ToOffset>(
3548 &self,
3549 range: Range<D>,
3550 include_hidden: bool,
3551 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3552 self.syntax
3553 .layers_for_range(range, &self.text, include_hidden)
3554 }
3555
3556 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3557 &self,
3558 range: Range<D>,
3559 ) -> Option<SyntaxLayer<'_>> {
3560 let range = range.to_offset(self);
3561 self.syntax
3562 .layers_for_range(range, &self.text, false)
3563 .max_by(|a, b| {
3564 if a.depth != b.depth {
3565 a.depth.cmp(&b.depth)
3566 } else if a.offset.0 != b.offset.0 {
3567 a.offset.0.cmp(&b.offset.0)
3568 } else {
3569 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3570 }
3571 })
3572 }
3573
3574 /// Returns the main [`Language`].
3575 pub fn language(&self) -> Option<&Arc<Language>> {
3576 self.language.as_ref()
3577 }
3578
3579 /// Returns the [`Language`] at the given location.
3580 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3581 self.syntax_layer_at(position)
3582 .map(|info| info.language)
3583 .or(self.language.as_ref())
3584 }
3585
3586 /// Returns the settings for the language at the given location.
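    ///
    /// A hedged sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`, an [`App`] reference named `cx`, and that `tab_size` is the field of
    /// interest on [`LanguageSettings`]:
    ///
    /// ```ignore
    /// // Settings can differ per position because of language injections.
    /// let settings = snapshot.settings_at(0, cx);
    /// let tab_size = settings.tab_size;
    /// ```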
3587 pub fn settings_at<'a, D: ToOffset>(
3588 &'a self,
3589 position: D,
3590 cx: &'a App,
3591 ) -> Cow<'a, LanguageSettings> {
3592 language_settings(
3593 self.language_at(position).map(|l| l.name()),
3594 self.file.as_ref(),
3595 cx,
3596 )
3597 }
3598
3599 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3600 CharClassifier::new(self.language_scope_at(point))
3601 }
3602
3603 /// Returns the [`LanguageScope`] at the given location.
3604 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3605 let offset = position.to_offset(self);
3606 let mut scope = None;
3607 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3608
3609 // Use the layer that has the smallest node intersecting the given point.
3610 for layer in self
3611 .syntax
3612 .layers_for_range(offset..offset, &self.text, false)
3613 {
3614 let mut cursor = layer.node().walk();
3615
3616 let mut range = None;
3617 loop {
3618 let child_range = cursor.node().byte_range();
3619 if !child_range.contains(&offset) {
3620 break;
3621 }
3622
3623 range = Some(child_range);
3624 if cursor.goto_first_child_for_byte(offset).is_none() {
3625 break;
3626 }
3627 }
3628
3629 if let Some(range) = range
3630 && smallest_range_and_depth.as_ref().is_none_or(
3631 |(smallest_range, smallest_range_depth)| {
3632 if layer.depth > *smallest_range_depth {
3633 true
3634 } else if layer.depth == *smallest_range_depth {
3635 range.len() < smallest_range.len()
3636 } else {
3637 false
3638 }
3639 },
3640 )
3641 {
3642 smallest_range_and_depth = Some((range, layer.depth));
3643 scope = Some(LanguageScope {
3644 language: layer.language.clone(),
3645 override_id: layer.override_id(offset, &self.text),
3646 });
3647 }
3648 }
3649
3650 scope.or_else(|| {
3651 self.language.clone().map(|language| LanguageScope {
3652 language,
3653 override_id: None,
3654 })
3655 })
3656 }
3657
3658 /// Returns a tuple of the range and character kind of the word
3659 /// surrounding the given position.
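    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`; the `CharKind::Word` variant is assumed here for illustration:
    ///
    /// ```ignore
    /// // Find the word around byte offset 42 and collect its text.
    /// let (range, kind) = snapshot.surrounding_word(42, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(range).collect();
    ///     println!("{word}");
    /// }
    /// ```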
3660 pub fn surrounding_word<T: ToOffset>(
3661 &self,
3662 start: T,
3663 scope_context: Option<CharScopeContext>,
3664 ) -> (Range<usize>, Option<CharKind>) {
3665 let mut start = start.to_offset(self);
3666 let mut end = start;
3667 let mut next_chars = self.chars_at(start).take(128).peekable();
3668 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3669
3670 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3671 let word_kind = cmp::max(
3672 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3673 next_chars.peek().copied().map(|c| classifier.kind(c)),
3674 );
3675
3676 for ch in prev_chars {
3677 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3678 start -= ch.len_utf8();
3679 } else {
3680 break;
3681 }
3682 }
3683
3684 for ch in next_chars {
3685 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3686 end += ch.len_utf8();
3687 } else {
3688 break;
3689 }
3690 }
3691
3692 (start..end, word_kind)
3693 }
3694
3695 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3696 /// range. When `require_larger` is true, the node found must be larger than the query range.
3697 ///
3698 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3699 /// be moved to the root of the tree.
3700 fn goto_node_enclosing_range(
3701 cursor: &mut tree_sitter::TreeCursor,
3702 query_range: &Range<usize>,
3703 require_larger: bool,
3704 ) -> bool {
3705 let mut ascending = false;
3706 loop {
3707 let mut range = cursor.node().byte_range();
3708 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3711 if range.start > query_range.start {
3712 cursor.goto_previous_sibling();
3713 range = cursor.node().byte_range();
3714 }
3715 } else {
3716 // When the query range is non-empty and the current node ends exactly at the start,
3717 // move to the next sibling to find a node that extends beyond the start.
3718 if range.end == query_range.start {
3719 cursor.goto_next_sibling();
3720 range = cursor.node().byte_range();
3721 }
3722 }
3723
3724 let encloses = range.contains_inclusive(query_range)
3725 && (!require_larger || range.len() > query_range.len());
3726 if !encloses {
3727 ascending = true;
3728 if !cursor.goto_parent() {
3729 return false;
3730 }
3731 continue;
3732 } else if ascending {
3733 return true;
3734 }
3735
3736 // Descend into the current node.
3737 if cursor
3738 .goto_first_child_for_byte(query_range.start)
3739 .is_none()
3740 {
3741 return true;
3742 }
3743 }
3744 }
3745
3746 pub fn syntax_ancestor<'a, T: ToOffset>(
3747 &'a self,
3748 range: Range<T>,
3749 ) -> Option<tree_sitter::Node<'a>> {
3750 let range = range.start.to_offset(self)..range.end.to_offset(self);
3751 let mut result: Option<tree_sitter::Node<'a>> = None;
3752 for layer in self
3753 .syntax
3754 .layers_for_range(range.clone(), &self.text, true)
3755 {
3756 let mut cursor = layer.node().walk();
3757
3758 // Find the node that both contains the range and is larger than it.
3759 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3760 continue;
3761 }
3762
3763 let left_node = cursor.node();
3764 let mut layer_result = left_node;
3765
3766 // For an empty range, try to find another node immediately to the right of the range.
3767 if left_node.end_byte() == range.start {
3768 let mut right_node = None;
3769 while !cursor.goto_next_sibling() {
3770 if !cursor.goto_parent() {
3771 break;
3772 }
3773 }
3774
3775 while cursor.node().start_byte() == range.start {
3776 right_node = Some(cursor.node());
3777 if !cursor.goto_first_child() {
3778 break;
3779 }
3780 }
3781
3782 // If there is a candidate node on both sides of the (empty) range, then
3783 // decide between the two by favoring a named node over an anonymous token.
3784 // If both nodes are the same in that regard, favor the right one.
3785 if let Some(right_node) = right_node
3786 && (right_node.is_named() || !left_node.is_named())
3787 {
3788 layer_result = right_node;
3789 }
3790 }
3791
3792 if let Some(previous_result) = &result
3793 && previous_result.byte_range().len() < layer_result.byte_range().len()
3794 {
3795 continue;
3796 }
3797 result = Some(layer_result);
3798 }
3799
3800 result
3801 }
3802
3803 /// Find the previous sibling syntax node at the given range.
3804 ///
3805 /// This function locates the syntax node that precedes the node containing
3806 /// the given range. It searches hierarchically by:
3807 /// 1. Finding the node that contains the given range
3808 /// 2. Looking for the previous sibling at the same tree level
3809 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3810 ///
3811 /// Returns `None` if there is no previous sibling at any ancestor level.
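    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`; [`Self::syntax_next_sibling`] below is used the same way:
    ///
    /// ```ignore
    /// // Inspect the node immediately preceding the one that covers bytes 10..20.
    /// if let Some(node) = snapshot.syntax_prev_sibling(10..20) {
    ///     println!("{} spans {:?}", node.kind(), node.byte_range());
    /// }
    /// ```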
3812 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3813 &'a self,
3814 range: Range<T>,
3815 ) -> Option<tree_sitter::Node<'a>> {
3816 let range = range.start.to_offset(self)..range.end.to_offset(self);
3817 let mut result: Option<tree_sitter::Node<'a>> = None;
3818
3819 for layer in self
3820 .syntax
3821 .layers_for_range(range.clone(), &self.text, true)
3822 {
3823 let mut cursor = layer.node().walk();
3824
3825 // Find the node that contains the range
3826 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3827 continue;
3828 }
3829
3830 // Look for the previous sibling, moving up ancestor levels if needed
3831 loop {
3832 if cursor.goto_previous_sibling() {
3833 let layer_result = cursor.node();
3834
3835 if let Some(previous_result) = &result {
3836 if previous_result.byte_range().end < layer_result.byte_range().end {
3837 continue;
3838 }
3839 }
3840 result = Some(layer_result);
3841 break;
3842 }
3843
3844 // No sibling found at this level, try moving up to parent
3845 if !cursor.goto_parent() {
3846 break;
3847 }
3848 }
3849 }
3850
3851 result
3852 }
3853
3854 /// Find the next sibling syntax node at the given range.
3855 ///
3856 /// This function locates the syntax node that follows the node containing
3857 /// the given range. It searches hierarchically by:
3858 /// 1. Finding the node that contains the given range
3859 /// 2. Looking for the next sibling at the same tree level
3860 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3861 ///
3862 /// Returns `None` if there is no next sibling at any ancestor level.
3863 pub fn syntax_next_sibling<'a, T: ToOffset>(
3864 &'a self,
3865 range: Range<T>,
3866 ) -> Option<tree_sitter::Node<'a>> {
3867 let range = range.start.to_offset(self)..range.end.to_offset(self);
3868 let mut result: Option<tree_sitter::Node<'a>> = None;
3869
3870 for layer in self
3871 .syntax
3872 .layers_for_range(range.clone(), &self.text, true)
3873 {
3874 let mut cursor = layer.node().walk();
3875
3876 // Find the node that contains the range
3877 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3878 continue;
3879 }
3880
3881 // Look for the next sibling, moving up ancestor levels if needed
3882 loop {
3883 if cursor.goto_next_sibling() {
3884 let layer_result = cursor.node();
3885
3886 if let Some(previous_result) = &result {
3887 if previous_result.byte_range().start > layer_result.byte_range().start {
3888 continue;
3889 }
3890 }
3891 result = Some(layer_result);
3892 break;
3893 }
3894
3895 // No sibling found at this level, try moving up to parent
3896 if !cursor.goto_parent() {
3897 break;
3898 }
3899 }
3900 }
3901
3902 result
3903 }
3904
3905 /// Returns the root syntax node within the given row
3906 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3907 let start_offset = position.to_offset(self);
3908
3909 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3910
3911 let layer = self
3912 .syntax
3913 .layers_for_range(start_offset..start_offset, &self.text, true)
3914 .next()?;
3915
3916 let mut cursor = layer.node().walk();
3917
3918 // Descend to the first leaf that touches the start of the range.
3919 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3920 if cursor.node().end_byte() == start_offset {
3921 cursor.goto_next_sibling();
3922 }
3923 }
3924
3925 // Ascend to the root node within the same row.
3926 while cursor.goto_parent() {
3927 if cursor.node().start_position().row != row {
3928 break;
3929 }
3930 }
3931
3932 Some(cursor.node())
3933 }
3934
3935 /// Returns the outline for the buffer.
3936 ///
3937 /// This method allows passing an optional [`SyntaxTheme`] to
3938 /// syntax-highlight the returned symbols.
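    ///
    /// A hedged sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot` and that the returned [`Outline`] exposes its flattened entries as `items`:
    ///
    /// ```ignore
    /// // Print an indented tree of the buffer's symbols.
    /// let outline = snapshot.outline(None);
    /// for item in &outline.items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```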
3939 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3940 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3941 }
3942
3943 /// Returns all the symbols that contain the given position.
3944 ///
3945 /// This method allows passing an optional [`SyntaxTheme`] to
3946 /// syntax-highlight the returned symbols.
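    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // Render a breadcrumb-style path of the symbols enclosing byte offset 256.
    /// let path = snapshot
    ///     .symbols_containing(256, None)
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect::<Vec<_>>()
    ///     .join(" > ");
    /// ```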
3947 pub fn symbols_containing<T: ToOffset>(
3948 &self,
3949 position: T,
3950 theme: Option<&SyntaxTheme>,
3951 ) -> Vec<OutlineItem<Anchor>> {
3952 let position = position.to_offset(self);
3953 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3954 let end = self.clip_offset(position + 1, Bias::Right);
3955 let mut items = self.outline_items_containing(start..end, false, theme);
3956 let mut prev_depth = None;
3957 items.retain(|item| {
3958 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3959 prev_depth = Some(item.depth);
3960 result
3961 });
3962 items
3963 }
3964
3965 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3966 let range = range.to_offset(self);
3967 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3968 grammar.outline_config.as_ref().map(|c| &c.query)
3969 });
3970 let configs = matches
3971 .grammars()
3972 .iter()
3973 .map(|g| g.outline_config.as_ref().unwrap())
3974 .collect::<Vec<_>>();
3975
3976 while let Some(mat) = matches.peek() {
3977 let config = &configs[mat.grammar_index];
3978 let containing_item_node = maybe!({
3979 let item_node = mat.captures.iter().find_map(|cap| {
3980 if cap.index == config.item_capture_ix {
3981 Some(cap.node)
3982 } else {
3983 None
3984 }
3985 })?;
3986
3987 let item_byte_range = item_node.byte_range();
3988 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3989 None
3990 } else {
3991 Some(item_node)
3992 }
3993 });
3994
3995 if let Some(item_node) = containing_item_node {
3996 return Some(
3997 Point::from_ts_point(item_node.start_position())
3998 ..Point::from_ts_point(item_node.end_position()),
3999 );
4000 }
4001
4002 matches.advance();
4003 }
4004 None
4005 }
4006
4007 pub fn outline_items_containing<T: ToOffset>(
4008 &self,
4009 range: Range<T>,
4010 include_extra_context: bool,
4011 theme: Option<&SyntaxTheme>,
4012 ) -> Vec<OutlineItem<Anchor>> {
4013 self.outline_items_containing_internal(
4014 range,
4015 include_extra_context,
4016 theme,
4017 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4018 )
4019 }
4020
4021 pub fn outline_items_as_points_containing<T: ToOffset>(
4022 &self,
4023 range: Range<T>,
4024 include_extra_context: bool,
4025 theme: Option<&SyntaxTheme>,
4026 ) -> Vec<OutlineItem<Point>> {
4027 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4028 range
4029 })
4030 }
4031
4032 fn outline_items_containing_internal<T: ToOffset, U>(
4033 &self,
4034 range: Range<T>,
4035 include_extra_context: bool,
4036 theme: Option<&SyntaxTheme>,
4037 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4038 ) -> Vec<OutlineItem<U>> {
4039 let range = range.to_offset(self);
4040 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4041 grammar.outline_config.as_ref().map(|c| &c.query)
4042 });
4043
4044 let mut items = Vec::new();
4045 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4046 while let Some(mat) = matches.peek() {
4047 let config = matches.grammars()[mat.grammar_index]
4048 .outline_config
4049 .as_ref()
4050 .unwrap();
4051 if let Some(item) =
4052 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4053 {
4054 items.push(item);
4055 } else if let Some(capture) = mat
4056 .captures
4057 .iter()
4058 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4059 {
4060 let capture_range = capture.node.start_position()..capture.node.end_position();
4061 let mut capture_row_range =
4062 capture_range.start.row as u32..capture_range.end.row as u32;
4063 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4064 {
4065 capture_row_range.end -= 1;
4066 }
4067 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4068 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4069 last_row_range.end = capture_row_range.end;
4070 } else {
4071 annotation_row_ranges.push(capture_row_range);
4072 }
4073 } else {
4074 annotation_row_ranges.push(capture_row_range);
4075 }
4076 }
4077 matches.advance();
4078 }
4079
4080 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4081
4082 // Assign depths based on containment relationships and convert to anchors.
4083 let mut item_ends_stack = Vec::<Point>::new();
4084 let mut anchor_items = Vec::new();
4085 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4086 for item in items {
4087 while let Some(last_end) = item_ends_stack.last().copied() {
4088 if last_end < item.range.end {
4089 item_ends_stack.pop();
4090 } else {
4091 break;
4092 }
4093 }
4094
4095 let mut annotation_row_range = None;
4096 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4097 let row_preceding_item = item.range.start.row.saturating_sub(1);
4098 if next_annotation_row_range.end < row_preceding_item {
4099 annotation_row_ranges.next();
4100 } else {
4101 if next_annotation_row_range.end == row_preceding_item {
4102 annotation_row_range = Some(next_annotation_row_range.clone());
4103 annotation_row_ranges.next();
4104 }
4105 break;
4106 }
4107 }
4108
4109 anchor_items.push(OutlineItem {
4110 depth: item_ends_stack.len(),
4111 range: range_callback(self, item.range.clone()),
4112 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4113 text: item.text,
4114 highlight_ranges: item.highlight_ranges,
4115 name_ranges: item.name_ranges,
4116 body_range: item.body_range.map(|r| range_callback(self, r)),
4117 annotation_range: annotation_row_range.map(|annotation_range| {
4118 let point_range = Point::new(annotation_range.start, 0)
4119 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4120 range_callback(self, point_range)
4121 }),
4122 });
4123 item_ends_stack.push(item.range.end);
4124 }
4125
4126 anchor_items
4127 }
4128
4129 fn next_outline_item(
4130 &self,
4131 config: &OutlineConfig,
4132 mat: &SyntaxMapMatch,
4133 range: &Range<usize>,
4134 include_extra_context: bool,
4135 theme: Option<&SyntaxTheme>,
4136 ) -> Option<OutlineItem<Point>> {
4137 let item_node = mat.captures.iter().find_map(|cap| {
4138 if cap.index == config.item_capture_ix {
4139 Some(cap.node)
4140 } else {
4141 None
4142 }
4143 })?;
4144
4145 let item_byte_range = item_node.byte_range();
4146 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4147 return None;
4148 }
4149 let item_point_range = Point::from_ts_point(item_node.start_position())
4150 ..Point::from_ts_point(item_node.end_position());
4151
4152 let mut open_point = None;
4153 let mut close_point = None;
4154
4155 let mut buffer_ranges = Vec::new();
4156 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4157 let mut range = node.start_byte()..node.end_byte();
4158 let start = node.start_position();
4159 if node.end_position().row > start.row {
4160 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4161 }
4162
4163 if !range.is_empty() {
4164 buffer_ranges.push((range, node_is_name));
4165 }
4166 };
4167
4168 for capture in mat.captures {
4169 if capture.index == config.name_capture_ix {
4170 add_to_buffer_ranges(capture.node, true);
4171 } else if Some(capture.index) == config.context_capture_ix
4172 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4173 {
4174 add_to_buffer_ranges(capture.node, false);
4175 } else {
4176 if Some(capture.index) == config.open_capture_ix {
4177 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4178 } else if Some(capture.index) == config.close_capture_ix {
4179 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4180 }
4181 }
4182 }
4183
4184 if buffer_ranges.is_empty() {
4185 return None;
4186 }
4187 let source_range_for_text =
4188 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4189
4190 let mut text = String::new();
4191 let mut highlight_ranges = Vec::new();
4192 let mut name_ranges = Vec::new();
4193 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4194 let mut last_buffer_range_end = 0;
4195 for (buffer_range, is_name) in buffer_ranges {
4196 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4197 if space_added {
4198 text.push(' ');
4199 }
4200 let before_append_len = text.len();
4201 let mut offset = buffer_range.start;
4202 chunks.seek(buffer_range.clone());
4203 for mut chunk in chunks.by_ref() {
4204 if chunk.text.len() > buffer_range.end - offset {
4205 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4206 offset = buffer_range.end;
4207 } else {
4208 offset += chunk.text.len();
4209 }
4210 let style = chunk
4211 .syntax_highlight_id
4212 .zip(theme)
4213 .and_then(|(highlight, theme)| highlight.style(theme));
4214 if let Some(style) = style {
4215 let start = text.len();
4216 let end = start + chunk.text.len();
4217 highlight_ranges.push((start..end, style));
4218 }
4219 text.push_str(chunk.text);
4220 if offset >= buffer_range.end {
4221 break;
4222 }
4223 }
4224 if is_name {
4225 let after_append_len = text.len();
4226 let start = if space_added && !name_ranges.is_empty() {
4227 before_append_len - 1
4228 } else {
4229 before_append_len
4230 };
4231 name_ranges.push(start..after_append_len);
4232 }
4233 last_buffer_range_end = buffer_range.end;
4234 }
4235
4236 Some(OutlineItem {
4237 depth: 0, // We'll calculate the depth later
4238 range: item_point_range,
4239 source_range_for_text: source_range_for_text.to_point(self),
4240 text,
4241 highlight_ranges,
4242 name_ranges,
4243 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4244 annotation_range: None,
4245 })
4246 }
4247
4248 pub fn function_body_fold_ranges<T: ToOffset>(
4249 &self,
4250 within: Range<T>,
4251 ) -> impl Iterator<Item = Range<usize>> + '_ {
4252 self.text_object_ranges(within, TreeSitterOptions::default())
4253 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4254 }
4255
4256 /// For each grammar in the language, runs the provided
4257 /// [`tree_sitter::Query`] against the given range.
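    ///
    /// A minimal sketch (not compiled as a doc test) of the peek/advance pattern used
    /// throughout this file, assuming a [`BufferSnapshot`] named `snapshot`:
    ///
    /// ```ignore
    /// // Walk every outline-query match in the first kilobyte of the buffer.
    /// let mut matches = snapshot.matches(0..1024, |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     for capture in mat.captures {
    ///         let _byte_range = capture.node.byte_range();
    ///     }
    ///     matches.advance();
    /// }
    /// ```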
4258 pub fn matches(
4259 &self,
4260 range: Range<usize>,
4261 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4262 ) -> SyntaxMapMatches<'_> {
4263 self.syntax.matches(range, self, query)
4264 }
4265
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs
    /// that intersect with those chunks. Hence, it may return more bracket pairs than the
    /// range itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped, provided the chunk data has not changed
    /// since the given version. The resulting bracket match collections are not ordered.
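    ///
    /// A hedged sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // Fetch bracket pairs around bytes 0..500 without any cached chunk information.
    /// let by_row_range = snapshot.fetch_bracket_ranges(0..500, None);
    /// for (row_range, brackets) in &by_row_range {
    ///     println!("rows {:?}: {} bracket pairs", row_range, brackets.len());
    /// }
    /// ```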
4271 pub fn fetch_bracket_ranges(
4272 &self,
4273 range: Range<usize>,
4274 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4275 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4276 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4277
4278 let known_chunks = match known_chunks {
4279 Some((known_version, known_chunks)) => {
4280 if !tree_sitter_data
4281 .chunks
4282 .version()
4283 .changed_since(known_version)
4284 {
4285 known_chunks.clone()
4286 } else {
4287 HashSet::default()
4288 }
4289 }
4290 None => HashSet::default(),
4291 };
4292
4293 let mut new_bracket_matches = HashMap::default();
4294 let mut all_bracket_matches = HashMap::default();
4295 let mut bracket_matches_to_color = HashMap::default();
4296
4297 for chunk in tree_sitter_data
4298 .chunks
4299 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4300 {
4301 if known_chunks.contains(&chunk.row_range()) {
4302 continue;
4303 }
4304 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4305 continue;
4306 };
4307 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4308
4309 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4310 Some(cached_brackets) => cached_brackets,
4311 None => {
4312 let mut bracket_pairs_ends = Vec::new();
4313 let mut matches =
4314 self.syntax
4315 .matches(chunk_range.clone(), &self.text, |grammar| {
4316 grammar.brackets_config.as_ref().map(|c| &c.query)
4317 });
4318 let configs = matches
4319 .grammars()
4320 .iter()
4321 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4322 .collect::<Vec<_>>();
4323
4324 let chunk_range = chunk_range.clone();
4325 let tree_sitter_matches = iter::from_fn(|| {
4326 while let Some(mat) = matches.peek() {
4327 let mut open = None;
4328 let mut close = None;
4329 let depth = mat.depth;
4330 let config = configs[mat.grammar_index];
4331 let pattern = &config.patterns[mat.pattern_index];
4332 for capture in mat.captures {
4333 if capture.index == config.open_capture_ix {
4334 open = Some(capture.node.byte_range());
4335 } else if capture.index == config.close_capture_ix {
4336 close = Some(capture.node.byte_range());
4337 }
4338 }
4339
4340 matches.advance();
4341
4342 let Some((open_range, close_range)) = open.zip(close) else {
4343 continue;
4344 };
4345
4346 let bracket_range = open_range.start..=close_range.end;
4347 if !bracket_range.overlaps(&chunk_range) {
4348 continue;
4349 }
4350
4351 if !pattern.rainbow_exclude
                                // Also, certain languages have "brackets" that are not really brackets, e.g. tags,
                                // and such a pair would match the entire tag with all of the text inside it.
                                // For now, avoid highlighting any pair where both brackets are longer than a single
                                // character. We still need to colorize `<Element/>` bracket pairs, so this check
                                // cannot be made stricter.
4356 && (open_range.len() == 1 || close_range.len() == 1)
4357 {
                                // Certain tree-sitter grammars may return more bracket pairs than needed:
                                // see `test_markdown_bracket_colorization` for a set-up that returns pairs with the
                                // same start bracket but different end brackets.
                                // In case of ambiguity, pick the pair with the shortest range.
4361 match bracket_matches_to_color.entry(open_range.clone()) {
4362 hash_map::Entry::Vacant(v) => {
4363 v.insert(close_range.clone());
4364 }
4365 hash_map::Entry::Occupied(mut o) => {
4366 let previous_close_range = o.get();
4367 let previous_length =
4368 previous_close_range.end - open_range.start;
4369 let new_length = close_range.end - open_range.start;
4370 if new_length < previous_length {
4371 o.insert(close_range.clone());
4372 }
4373 }
4374 }
4375 }
4376 return Some((open_range, close_range, pattern, depth));
4377 }
4378 None
4379 })
4380 .sorted_by_key(|(open_range, _, _, _)| open_range.start)
4381 .collect::<Vec<_>>();
4382
4383 let new_matches = tree_sitter_matches
4384 .into_iter()
4385 .map(|(open_range, close_range, pattern, syntax_layer_depth)| {
4386 let participates_in_colorizing =
4387 bracket_matches_to_color.get(&open_range).is_some_and(
4388 |close_range_to_color| close_range_to_color == &close_range,
4389 );
4390 let color_index = if participates_in_colorizing {
4391 while let Some(&last_bracket_end) = bracket_pairs_ends.last() {
4392 if last_bracket_end <= open_range.start {
4393 bracket_pairs_ends.pop();
4394 } else {
4395 break;
4396 }
4397 }
4398
4399 let bracket_depth = bracket_pairs_ends.len();
4400 bracket_pairs_ends.push(close_range.end);
4401 Some(bracket_depth)
4402 } else {
4403 None
4404 };
4405
4406 BracketMatch {
4407 open_range,
4408 close_range,
4409 syntax_layer_depth,
4410 newline_only: pattern.newline_only,
4411 color_index,
4412 }
4413 })
4414 .collect::<Vec<_>>();
4415
4416 new_bracket_matches.insert(chunk.id, new_matches.clone());
4417 new_matches
4418 }
4419 };
4420 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4421 }
4422
4423 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4424 if latest_tree_sitter_data.chunks.version() == &self.version {
4425 for (chunk_id, new_matches) in new_bracket_matches {
4426 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4427 if old_chunks.is_none() {
4428 *old_chunks = Some(new_matches);
4429 }
4430 }
4431 }
4432
4433 all_bracket_matches
4434 }
4435
4436 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4437 let mut tree_sitter_data = self.tree_sitter_data.lock();
4438 if self
4439 .version
4440 .changed_since(tree_sitter_data.chunks.version())
4441 {
4442 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4443 }
4444 tree_sitter_data
4445 }
4446
4447 pub fn all_bracket_ranges(
4448 &self,
4449 range: Range<usize>,
4450 ) -> impl Iterator<Item = BracketMatch<usize>> {
4451 self.fetch_bracket_ranges(range.clone(), None)
4452 .into_values()
4453 .flatten()
4454 .filter(move |bracket_match| {
4455 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4456 bracket_range.overlaps(&range)
4457 })
4458 }
4459
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4461 pub fn bracket_ranges<T: ToOffset>(
4462 &self,
4463 range: Range<T>,
4464 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4465 // Find bracket pairs that *inclusively* contain the given range.
4466 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4467 self.all_bracket_ranges(range)
4468 .filter(|pair| !pair.newline_only)
4469 }
4470
4471 pub fn debug_variables_query<T: ToOffset>(
4472 &self,
4473 range: Range<T>,
4474 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4475 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4476
4477 let mut matches = self.syntax.matches_with_options(
4478 range.clone(),
4479 &self.text,
4480 TreeSitterOptions::default(),
4481 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4482 );
4483
4484 let configs = matches
4485 .grammars()
4486 .iter()
4487 .map(|grammar| grammar.debug_variables_config.as_ref())
4488 .collect::<Vec<_>>();
4489
4490 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4491
4492 iter::from_fn(move || {
4493 loop {
4494 while let Some(capture) = captures.pop() {
4495 if capture.0.overlaps(&range) {
4496 return Some(capture);
4497 }
4498 }
4499
4500 let mat = matches.peek()?;
4501
4502 let Some(config) = configs[mat.grammar_index].as_ref() else {
4503 matches.advance();
4504 continue;
4505 };
4506
4507 for capture in mat.captures {
4508 let Some(ix) = config
4509 .objects_by_capture_ix
4510 .binary_search_by_key(&capture.index, |e| e.0)
4511 .ok()
4512 else {
4513 continue;
4514 };
4515 let text_object = config.objects_by_capture_ix[ix].1;
4516 let byte_range = capture.node.byte_range();
4517
4518 let mut found = false;
4519 for (range, existing) in captures.iter_mut() {
4520 if existing == &text_object {
4521 range.start = range.start.min(byte_range.start);
4522 range.end = range.end.max(byte_range.end);
4523 found = true;
4524 break;
4525 }
4526 }
4527
4528 if !found {
4529 captures.push((byte_range, text_object));
4530 }
4531 }
4532
4533 matches.advance();
4534 }
4535 })
4536 }
4537
4538 pub fn text_object_ranges<T: ToOffset>(
4539 &self,
4540 range: Range<T>,
4541 options: TreeSitterOptions,
4542 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4543 let range =
4544 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4545
4546 let mut matches =
4547 self.syntax
4548 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4549 grammar.text_object_config.as_ref().map(|c| &c.query)
4550 });
4551
4552 let configs = matches
4553 .grammars()
4554 .iter()
4555 .map(|grammar| grammar.text_object_config.as_ref())
4556 .collect::<Vec<_>>();
4557
4558 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4559
4560 iter::from_fn(move || {
4561 loop {
4562 while let Some(capture) = captures.pop() {
4563 if capture.0.overlaps(&range) {
4564 return Some(capture);
4565 }
4566 }
4567
4568 let mat = matches.peek()?;
4569
4570 let Some(config) = configs[mat.grammar_index].as_ref() else {
4571 matches.advance();
4572 continue;
4573 };
4574
4575 for capture in mat.captures {
4576 let Some(ix) = config
4577 .text_objects_by_capture_ix
4578 .binary_search_by_key(&capture.index, |e| e.0)
4579 .ok()
4580 else {
4581 continue;
4582 };
4583 let text_object = config.text_objects_by_capture_ix[ix].1;
4584 let byte_range = capture.node.byte_range();
4585
4586 let mut found = false;
4587 for (range, existing) in captures.iter_mut() {
4588 if existing == &text_object {
4589 range.start = range.start.min(byte_range.start);
4590 range.end = range.end.max(byte_range.end);
4591 found = true;
4592 break;
4593 }
4594 }
4595
4596 if !found {
4597 captures.push((byte_range, text_object));
4598 }
4599 }
4600
4601 matches.advance();
4602 }
4603 })
4604 }
4605
    /// Returns enclosing bracket ranges containing the given range.
4607 pub fn enclosing_bracket_ranges<T: ToOffset>(
4608 &self,
4609 range: Range<T>,
4610 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4611 let range = range.start.to_offset(self)..range.end.to_offset(self);
4612
4613 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4614 let max_depth = result
4615 .iter()
4616 .map(|mat| mat.syntax_layer_depth)
4617 .max()
4618 .unwrap_or(0);
4619 result.into_iter().filter(move |pair| {
4620 pair.open_range.start <= range.start
4621 && pair.close_range.end >= range.end
4622 && pair.syntax_layer_depth == max_depth
4623 })
4624 }
4625
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if
    /// no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
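    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`; the `filter` closure is a hypothetical example:
    ///
    /// ```ignore
    /// // Find the tightest bracket pair around bytes 10..12, keeping only pairs that have
    /// // at least one character between the brackets.
    /// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter)) {
    ///     println!("open {:?}, close {:?}", open, close);
    /// }
    /// ```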
4629 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4630 &self,
4631 range: Range<T>,
4632 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4633 ) -> Option<(Range<usize>, Range<usize>)> {
4634 let range = range.start.to_offset(self)..range.end.to_offset(self);
4635
4636 // Get the ranges of the innermost pair of brackets.
4637 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4638
4639 for pair in self.enclosing_bracket_ranges(range) {
4640 if let Some(range_filter) = range_filter
4641 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4642 {
4643 continue;
4644 }
4645
4646 let len = pair.close_range.end - pair.open_range.start;
4647
4648 if let Some((existing_open, existing_close)) = &result {
4649 let existing_len = existing_close.end - existing_open.start;
4650 if len > existing_len {
4651 continue;
4652 }
4653 }
4654
4655 result = Some((pair.open_range, pair.close_range));
4656 }
4657
4658 result
4659 }
4660
    /// Returns the offset ranges of any matches of the redaction query.
4662 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4663 /// will be run on the relevant section of the buffer.
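    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // Collect every redacted span in the whole buffer, e.g. to render it obscured.
    /// let redacted: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```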
4664 pub fn redacted_ranges<T: ToOffset>(
4665 &self,
4666 range: Range<T>,
4667 ) -> impl Iterator<Item = Range<usize>> + '_ {
4668 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4669 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4670 grammar
4671 .redactions_config
4672 .as_ref()
4673 .map(|config| &config.query)
4674 });
4675
4676 let configs = syntax_matches
4677 .grammars()
4678 .iter()
4679 .map(|grammar| grammar.redactions_config.as_ref())
4680 .collect::<Vec<_>>();
4681
4682 iter::from_fn(move || {
4683 let redacted_range = syntax_matches
4684 .peek()
4685 .and_then(|mat| {
4686 configs[mat.grammar_index].and_then(|config| {
4687 mat.captures
4688 .iter()
4689 .find(|capture| capture.index == config.redaction_capture_ix)
4690 })
4691 })
4692 .map(|mat| mat.node.byte_range());
4693 syntax_matches.advance();
4694 redacted_range
4695 })
4696 }
4697
4698 pub fn injections_intersecting_range<T: ToOffset>(
4699 &self,
4700 range: Range<T>,
4701 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4702 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4703
4704 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4705 grammar
4706 .injection_config
4707 .as_ref()
4708 .map(|config| &config.query)
4709 });
4710
4711 let configs = syntax_matches
4712 .grammars()
4713 .iter()
4714 .map(|grammar| grammar.injection_config.as_ref())
4715 .collect::<Vec<_>>();
4716
4717 iter::from_fn(move || {
4718 let ranges = syntax_matches.peek().and_then(|mat| {
4719 let config = &configs[mat.grammar_index]?;
4720 let content_capture_range = mat.captures.iter().find_map(|capture| {
4721 if capture.index == config.content_capture_ix {
4722 Some(capture.node.byte_range())
4723 } else {
4724 None
4725 }
4726 })?;
4727 let language = self.language_at(content_capture_range.start)?;
4728 Some((content_capture_range, language))
4729 });
4730 syntax_matches.advance();
4731 ranges
4732 })
4733 }
4734
4735 pub fn runnable_ranges(
4736 &self,
4737 offset_range: Range<usize>,
4738 ) -> impl Iterator<Item = RunnableRange> + '_ {
4739 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4740 grammar.runnable_config.as_ref().map(|config| &config.query)
4741 });
4742
4743 let test_configs = syntax_matches
4744 .grammars()
4745 .iter()
4746 .map(|grammar| grammar.runnable_config.as_ref())
4747 .collect::<Vec<_>>();
4748
4749 iter::from_fn(move || {
4750 loop {
4751 let mat = syntax_matches.peek()?;
4752
4753 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4754 let mut run_range = None;
4755 let full_range = mat.captures.iter().fold(
4756 Range {
4757 start: usize::MAX,
4758 end: 0,
4759 },
4760 |mut acc, next| {
4761 let byte_range = next.node.byte_range();
4762 if acc.start > byte_range.start {
4763 acc.start = byte_range.start;
4764 }
4765 if acc.end < byte_range.end {
4766 acc.end = byte_range.end;
4767 }
4768 acc
4769 },
4770 );
4771 if full_range.start > full_range.end {
4772 // We did not find a full spanning range of this match.
4773 return None;
4774 }
4775 let extra_captures: SmallVec<[_; 1]> =
4776 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4777 test_configs
4778 .extra_captures
4779 .get(capture.index as usize)
4780 .cloned()
4781 .and_then(|tag_name| match tag_name {
4782 RunnableCapture::Named(name) => {
4783 Some((capture.node.byte_range(), name))
4784 }
4785 RunnableCapture::Run => {
4786 let _ = run_range.insert(capture.node.byte_range());
4787 None
4788 }
4789 })
4790 }));
4791 let run_range = run_range?;
4792 let tags = test_configs
4793 .query
4794 .property_settings(mat.pattern_index)
4795 .iter()
4796 .filter_map(|property| {
4797 if *property.key == *"tag" {
4798 property
4799 .value
4800 .as_ref()
4801 .map(|value| RunnableTag(value.to_string().into()))
4802 } else {
4803 None
4804 }
4805 })
4806 .collect();
4807 let extra_captures = extra_captures
4808 .into_iter()
4809 .map(|(range, name)| {
4810 (
4811 name.to_string(),
4812 self.text_for_range(range).collect::<String>(),
4813 )
4814 })
4815 .collect();
4816 // All tags should have the same range.
4817 Some(RunnableRange {
4818 run_range,
4819 full_range,
4820 runnable: Runnable {
4821 tags,
4822 language: mat.language,
4823 buffer: self.remote_id(),
4824 },
4825 extra_captures,
4826 buffer_id: self.remote_id(),
4827 })
4828 });
4829
4830 syntax_matches.advance();
4831 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, we don't want this
                    // iterator to end just because a match lacked a run marker, so in that case we simply
                    // loop around and try the next match.
4834 return test_range;
4835 }
4836 }
4837 })
4838 }
4839
    /// Returns the selections of remote peers (and, if `include_local` is true, the local
    /// replica as well) that intersect the given range.
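    ///
    /// A hedged sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot` and that `Anchor::MIN..Anchor::MAX` covers the entire buffer:
    ///
    /// ```ignore
    /// // Count every remote selection currently present in the buffer.
    /// let remote_selections: usize = snapshot
    ///     .selections_in_range(Anchor::MIN..Anchor::MAX, false)
    ///     .map(|(_replica, _line_mode, _cursor_shape, selections)| selections.count())
    ///     .sum();
    /// ```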
4841 #[allow(clippy::type_complexity)]
4842 pub fn selections_in_range(
4843 &self,
4844 range: Range<Anchor>,
4845 include_local: bool,
4846 ) -> impl Iterator<
4847 Item = (
4848 ReplicaId,
4849 bool,
4850 CursorShape,
4851 impl Iterator<Item = &Selection<Anchor>> + '_,
4852 ),
4853 > + '_ {
4854 self.remote_selections
4855 .iter()
4856 .filter(move |(replica_id, set)| {
4857 (include_local || **replica_id != self.text.replica_id())
4858 && !set.selections.is_empty()
4859 })
4860 .map(move |(replica_id, set)| {
4861 let start_ix = match set.selections.binary_search_by(|probe| {
4862 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4863 }) {
4864 Ok(ix) | Err(ix) => ix,
4865 };
4866 let end_ix = match set.selections.binary_search_by(|probe| {
4867 probe.start.cmp(&range.end, self).then(Ordering::Less)
4868 }) {
4869 Ok(ix) | Err(ix) => ix,
4870 };
4871
4872 (
4873 *replica_id,
4874 set.line_mode,
4875 set.cursor_shape,
4876 set.selections[start_ix..end_ix].iter(),
4877 )
4878 })
4879 }
4880
    /// Returns whether the buffer contains any diagnostics.
4882 pub fn has_diagnostics(&self) -> bool {
4883 !self.diagnostics.is_empty()
4884 }
4885
4886 /// Returns all the diagnostics intersecting the given range.
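    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // List every error in the buffer together with its starting row.
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
    ///         println!("row {}: {}", entry.range.start.row, entry.diagnostic.message);
    ///     }
    /// }
    /// ```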
4887 pub fn diagnostics_in_range<'a, T, O>(
4888 &'a self,
4889 search_range: Range<T>,
4890 reversed: bool,
4891 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4892 where
4893 T: 'a + Clone + ToOffset,
4894 O: 'a + FromAnchor,
4895 {
4896 let mut iterators: Vec<_> = self
4897 .diagnostics
4898 .iter()
4899 .map(|(_, collection)| {
4900 collection
4901 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4902 .peekable()
4903 })
4904 .collect();
4905
4906 std::iter::from_fn(move || {
4907 let (next_ix, _) = iterators
4908 .iter_mut()
4909 .enumerate()
4910 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4911 .min_by(|(_, a), (_, b)| {
4912 let cmp = a
4913 .range
4914 .start
4915 .cmp(&b.range.start, self)
4916 // when range is equal, sort by diagnostic severity
4917 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4918 // and stabilize order with group_id
4919 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4920 if reversed { cmp.reverse() } else { cmp }
4921 })?;
4922 iterators[next_ix]
4923 .next()
4924 .map(
4925 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4926 diagnostic,
4927 range: FromAnchor::from_anchor(&range.start, self)
4928 ..FromAnchor::from_anchor(&range.end, self),
4929 },
4930 )
4931 })
4932 }
4933
4934 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4935 /// should be used instead.
4936 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4937 &self.diagnostics
4938 }
4939
4940 /// Returns all the diagnostic groups associated with the given
4941 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
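    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // Print the primary message of every diagnostic group, across all language servers.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```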
4943 pub fn diagnostic_groups(
4944 &self,
4945 language_server_id: Option<LanguageServerId>,
4946 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4947 let mut groups = Vec::new();
4948
4949 if let Some(language_server_id) = language_server_id {
4950 if let Ok(ix) = self
4951 .diagnostics
4952 .binary_search_by_key(&language_server_id, |e| e.0)
4953 {
4954 self.diagnostics[ix]
4955 .1
4956 .groups(language_server_id, &mut groups, self);
4957 }
4958 } else {
4959 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4960 diagnostics.groups(*language_server_id, &mut groups, self);
4961 }
4962 }
4963
4964 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4965 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4966 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4967 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4968 });
4969
4970 groups
4971 }
4972
4973 /// Returns an iterator over the diagnostics for the given group.
4974 pub fn diagnostic_group<O>(
4975 &self,
4976 group_id: usize,
4977 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4978 where
4979 O: FromAnchor + 'static,
4980 {
4981 self.diagnostics
4982 .iter()
4983 .flat_map(move |(_, set)| set.group(group_id, self))
4984 }
4985
4986 /// An integer version number that accounts for all updates besides
4987 /// the buffer's text itself (which is versioned via a version vector).
4988 pub fn non_text_state_update_count(&self) -> usize {
4989 self.non_text_state_update_count
4990 }
4991
4992 /// An integer version that changes when the buffer's syntax changes.
4993 pub fn syntax_update_count(&self) -> usize {
4994 self.syntax.update_count()
4995 }
4996
    /// Returns a snapshot of the underlying file.
4998 pub fn file(&self) -> Option<&Arc<dyn File>> {
4999 self.file.as_ref()
5000 }
5001
5002 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5003 if let Some(file) = self.file() {
5004 if file.path().file_name().is_none() || include_root {
5005 Some(file.full_path(cx).to_string_lossy().into_owned())
5006 } else {
5007 Some(file.path().display(file.path_style(cx)).to_string())
5008 }
5009 } else {
5010 None
5011 }
5012 }
5013
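    /// Collects the distinct words within the given range, mapped to their anchor ranges.
    /// See [`WordsQuery`] for the available filters.
    ///
    /// A hedged sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// // Gather words containing the characters of "foo", skipping digit-led tokens.
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("foo"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```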
5014 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5015 let query_str = query.fuzzy_contents;
5016 if query_str.is_some_and(|query| query.is_empty()) {
5017 return BTreeMap::default();
5018 }
5019
5020 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5021 language,
5022 override_id: None,
5023 }));
5024
5025 let mut query_ix = 0;
5026 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5027 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5028
5029 let mut words = BTreeMap::default();
5030 let mut current_word_start_ix = None;
5031 let mut chunk_ix = query.range.start;
5032 for chunk in self.chunks(query.range, false) {
5033 for (i, c) in chunk.text.char_indices() {
5034 let ix = chunk_ix + i;
5035 if classifier.is_word(c) {
5036 if current_word_start_ix.is_none() {
5037 current_word_start_ix = Some(ix);
5038 }
5039
5040 if let Some(query_chars) = &query_chars
5041 && query_ix < query_len
5042 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5043 {
5044 query_ix += 1;
5045 }
5046 continue;
5047 } else if let Some(word_start) = current_word_start_ix.take()
5048 && query_ix == query_len
5049 {
5050 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5051 let mut word_text = self.text_for_range(word_start..ix).peekable();
5052 let first_char = word_text
5053 .peek()
5054 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty entries and
                    // "words" that start with a digit.
5056 if !query.skip_digits
5057 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5058 {
5059 words.insert(word_text.collect(), word_range);
5060 }
5061 }
5062 query_ix = 0;
5063 }
5064 chunk_ix += chunk.text.len();
5065 }
5066
5067 words
5068 }
5069}
5070
5071pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the fuzzy string's characters, in order and
    /// case-insensitively.
5073 pub fuzzy_contents: Option<&'a str>,
5074 /// Skips words that start with a digit.
5075 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5077 pub range: Range<usize>,
5078}
5079
5080fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5081 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5082}
5083
5084fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5085 let mut result = IndentSize::spaces(0);
5086 for c in text {
5087 let kind = match c {
5088 ' ' => IndentKind::Space,
5089 '\t' => IndentKind::Tab,
5090 _ => break,
5091 };
5092 if result.len == 0 {
5093 result.kind = kind;
5094 }
5095 result.len += 1;
5096 }
5097 result
5098}
5099
5100impl Clone for BufferSnapshot {
5101 fn clone(&self) -> Self {
5102 Self {
5103 text: self.text.clone(),
5104 syntax: self.syntax.clone(),
5105 file: self.file.clone(),
5106 remote_selections: self.remote_selections.clone(),
5107 diagnostics: self.diagnostics.clone(),
5108 language: self.language.clone(),
5109 tree_sitter_data: self.tree_sitter_data.clone(),
5110 non_text_state_update_count: self.non_text_state_update_count,
5111 }
5112 }
5113}
5114
5115impl Deref for BufferSnapshot {
5116 type Target = text::BufferSnapshot;
5117
5118 fn deref(&self) -> &Self::Target {
5119 &self.text
5120 }
5121}
5122
5123unsafe impl Send for BufferChunks<'_> {}
5124
5125impl<'a> BufferChunks<'a> {
5126 pub(crate) fn new(
5127 text: &'a Rope,
5128 range: Range<usize>,
5129 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5130 diagnostics: bool,
5131 buffer_snapshot: Option<&'a BufferSnapshot>,
5132 ) -> Self {
5133 let mut highlights = None;
5134 if let Some((captures, highlight_maps)) = syntax {
5135 highlights = Some(BufferChunkHighlights {
5136 captures,
5137 next_capture: None,
5138 stack: Default::default(),
5139 highlight_maps,
5140 })
5141 }
5142
5143 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5144 let chunks = text.chunks_in_range(range.clone());
5145
5146 let mut this = BufferChunks {
5147 range,
5148 buffer_snapshot,
5149 chunks,
5150 diagnostic_endpoints,
5151 error_depth: 0,
5152 warning_depth: 0,
5153 information_depth: 0,
5154 hint_depth: 0,
5155 unnecessary_depth: 0,
5156 underline: true,
5157 highlights,
5158 };
5159 this.initialize_diagnostic_endpoints();
5160 this
5161 }
5162
    /// Seeks the chunk iterator to the given byte range of the buffer.
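    ///
    /// A minimal sketch (not compiled as a doc test), assuming a [`BufferSnapshot`] named
    /// `snapshot`:
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// // Jump the iterator to a different byte range; highlights and diagnostic endpoints
    /// // are re-initialized for the new range.
    /// chunks.seek(10..20);
    /// let text: String = chunks.map(|chunk| chunk.text).collect();
    /// ```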
5164 pub fn seek(&mut self, range: Range<usize>) {
5165 let old_range = std::mem::replace(&mut self.range, range.clone());
5166 self.chunks.set_range(self.range.clone());
5167 if let Some(highlights) = self.highlights.as_mut() {
5168 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5169 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5170 highlights
5171 .stack
5172 .retain(|(end_offset, _)| *end_offset > range.start);
5173 if let Some(capture) = &highlights.next_capture
5174 && range.start >= capture.node.start_byte()
5175 {
5176 let next_capture_end = capture.node.end_byte();
5177 if range.start < next_capture_end {
5178 highlights.stack.push((
5179 next_capture_end,
5180 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5181 ));
5182 }
5183 highlights.next_capture.take();
5184 }
5185 } else if let Some(snapshot) = self.buffer_snapshot {
5186 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5187 *highlights = BufferChunkHighlights {
5188 captures,
5189 next_capture: None,
5190 stack: Default::default(),
5191 highlight_maps,
5192 };
5193 } else {
5194 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5195 // Seeking such BufferChunks is not supported.
5196 debug_assert!(
5197 false,
5198 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5199 );
5200 }
5201
5202 highlights.captures.set_byte_range(self.range.clone());
5203 self.initialize_diagnostic_endpoints();
5204 }
5205 }
5206
5207 fn initialize_diagnostic_endpoints(&mut self) {
5208 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5209 && let Some(buffer) = self.buffer_snapshot
5210 {
5211 let mut diagnostic_endpoints = Vec::new();
5212 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5213 diagnostic_endpoints.push(DiagnosticEndpoint {
5214 offset: entry.range.start,
5215 is_start: true,
5216 severity: entry.diagnostic.severity,
5217 is_unnecessary: entry.diagnostic.is_unnecessary,
5218 underline: entry.diagnostic.underline,
5219 });
5220 diagnostic_endpoints.push(DiagnosticEndpoint {
5221 offset: entry.range.end,
5222 is_start: false,
5223 severity: entry.diagnostic.severity,
5224 is_unnecessary: entry.diagnostic.is_unnecessary,
5225 underline: entry.diagnostic.underline,
5226 });
5227 }
5228 diagnostic_endpoints
5229 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5230 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5231 self.hint_depth = 0;
5232 self.error_depth = 0;
5233 self.warning_depth = 0;
5234 self.information_depth = 0;
5235 }
5236 }
5237
5238 /// The current byte offset in the buffer.
5239 pub fn offset(&self) -> usize {
5240 self.range.start
5241 }
5242
5243 pub fn range(&self) -> Range<usize> {
5244 self.range.clone()
5245 }
5246
5247 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5248 let depth = match endpoint.severity {
5249 DiagnosticSeverity::ERROR => &mut self.error_depth,
5250 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5251 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5252 DiagnosticSeverity::HINT => &mut self.hint_depth,
5253 _ => return,
5254 };
5255 if endpoint.is_start {
5256 *depth += 1;
5257 } else {
5258 *depth -= 1;
5259 }
5260
5261 if endpoint.is_unnecessary {
5262 if endpoint.is_start {
5263 self.unnecessary_depth += 1;
5264 } else {
5265 self.unnecessary_depth -= 1;
5266 }
5267 }
5268 }
5269
5270 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5271 if self.error_depth > 0 {
5272 Some(DiagnosticSeverity::ERROR)
5273 } else if self.warning_depth > 0 {
5274 Some(DiagnosticSeverity::WARNING)
5275 } else if self.information_depth > 0 {
5276 Some(DiagnosticSeverity::INFORMATION)
5277 } else if self.hint_depth > 0 {
5278 Some(DiagnosticSeverity::HINT)
5279 } else {
5280 None
5281 }
5282 }
5283
5284 fn current_code_is_unnecessary(&self) -> bool {
5285 self.unnecessary_depth > 0
5286 }
5287}
5288
5289impl<'a> Iterator for BufferChunks<'a> {
5290 type Item = Chunk<'a>;
5291
5292 fn next(&mut self) -> Option<Self::Item> {
5293 let mut next_capture_start = usize::MAX;
5294 let mut next_diagnostic_endpoint = usize::MAX;
5295
5296 if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

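            // Build a mask of the low `bit_end` bits (`unbounded_shl` saturates to zero
            // when `bit_end` is 128, so the mask then covers the whole 128-bit chunk),
            // then shift the chunk's tab/char bitmaps down to `bit_start` and apply it.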
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
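    ///
    /// A small illustration of the delta semantics (not a doctest; field access
    /// shown only for brevity):
    ///
    /// ```ignore
    /// let shrunk = IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 4);
    /// ```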
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

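    /// The number of columns this indent occupies when tabs are expanded to
    /// `tab_size` columns (e.g. a single tab with a tab size of 4 yields 4).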
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

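/// Groups an iterator of row numbers into ranges of consecutive values, capping
/// each range at `max_len` rows.
///
/// A minimal sketch of the expected output (illustrative, not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```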
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

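    /// Classifies `c`, preferring [`CharKind::Word`] for alphanumerics, `_`, and any
    /// extra word characters defined by the language scope for the current context.
    ///
    /// A small illustration (not a doctest):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```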
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
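///
/// A small illustration of the expected byte ranges (not a doctest):
///
/// ```ignore
/// let rope = Rope::from("foo  \nbar\t\nbaz");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
/// ```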
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}