1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25pub use clock::ReplicaId;
26use clock::{Global, Lamport};
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] can be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoized result of `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
134}
135
136#[derive(Debug, Clone)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self) {
146 self.brackets_by_chunks = vec![None; self.chunks.len()];
147 }
148
149 fn new(snapshot: text::BufferSnapshot) -> Self {
150 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
151 Self {
152 brackets_by_chunks: vec![None; chunks.len()],
153 chunks,
154 }
155 }
156}
157
158#[derive(Copy, Clone, Debug, PartialEq, Eq)]
159pub enum ParseStatus {
160 Idle,
161 Parsing,
162}
163
164struct BufferBranchState {
165 base_buffer: Entity<Buffer>,
166 merged_operations: Vec<Lamport>,
167}
168
169/// An immutable, cheaply cloneable representation of a fixed
170/// state of a buffer.
171pub struct BufferSnapshot {
172 pub text: text::BufferSnapshot,
173 pub syntax: SyntaxSnapshot,
174 file: Option<Arc<dyn File>>,
175 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
176 remote_selections: TreeMap<ReplicaId, SelectionSet>,
177 language: Option<Arc<Language>>,
178 non_text_state_update_count: usize,
179 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
180}
181
182/// The kind and amount of indentation in a particular line. For now,
183/// assumes that indentation is all the same character.
184#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
185pub struct IndentSize {
186 /// The number of bytes that comprise the indentation.
187 pub len: u32,
188 /// The kind of whitespace used for indentation.
189 pub kind: IndentKind,
190}
191
192/// A whitespace character that's used for indentation.
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
194pub enum IndentKind {
195 /// An ASCII space character.
196 #[default]
197 Space,
198 /// An ASCII tab character.
199 Tab,
200}
201
202/// The shape of a selection cursor.
203#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
204pub enum CursorShape {
205 /// A vertical bar
206 #[default]
207 Bar,
208 /// A block that surrounds the following character
209 Block,
210 /// An underline that runs along the following character
211 Underline,
212 /// A box drawn around the following character
213 Hollow,
214}
215
216impl From<settings::CursorShape> for CursorShape {
217 fn from(shape: settings::CursorShape) -> Self {
218 match shape {
219 settings::CursorShape::Bar => CursorShape::Bar,
220 settings::CursorShape::Block => CursorShape::Block,
221 settings::CursorShape::Underline => CursorShape::Underline,
222 settings::CursorShape::Hollow => CursorShape::Hollow,
223 }
224 }
225}
226
227#[derive(Clone, Debug)]
228struct SelectionSet {
229 line_mode: bool,
230 cursor_shape: CursorShape,
231 selections: Arc<[Selection<Anchor>]>,
232 lamport_timestamp: clock::Lamport,
233}
234
235/// A diagnostic associated with a certain range of a buffer.
236#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
237pub struct Diagnostic {
238 /// The name of the service that produced this diagnostic.
239 pub source: Option<String>,
240 /// The ID provided by the dynamic registration that produced this diagnostic.
241 pub registration_id: Option<SharedString>,
242 /// A machine-readable code that identifies this diagnostic.
243 pub code: Option<NumberOrString>,
244 pub code_description: Option<lsp::Uri>,
245 /// Whether this diagnostic is a hint, warning, or error.
246 pub severity: DiagnosticSeverity,
247 /// The human-readable message associated with this diagnostic.
248 pub message: String,
    /// The human-readable message, in Markdown format.
250 pub markdown: Option<String>,
251 /// An id that identifies the group to which this diagnostic belongs.
252 ///
253 /// When a language server produces a diagnostic with
254 /// one or more associated diagnostics, those diagnostics are all
255 /// assigned a single group ID.
256 pub group_id: usize,
257 /// Whether this diagnostic is the primary diagnostic for its group.
258 ///
259 /// In a given group, the primary diagnostic is the top-level diagnostic
260 /// returned by the language server. The non-primary diagnostics are the
261 /// associated diagnostics.
262 pub is_primary: bool,
263 /// Whether this diagnostic is considered to originate from an analysis of
264 /// files on disk, as opposed to any unsaved buffer contents. This is a
265 /// property of a given diagnostic source, and is configured for a given
266 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
267 /// for the language server.
268 pub is_disk_based: bool,
269 /// Whether this diagnostic marks unnecessary code.
270 pub is_unnecessary: bool,
    /// A coarse classification of diagnostic groups by the kind of source that produced them.
272 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// language server when code actions are requested for this diagnostic.
274 pub data: Option<Value>,
275 /// Whether to underline the corresponding text range in the editor.
276 pub underline: bool,
277}
278
279#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
280pub enum DiagnosticSourceKind {
281 Pulled,
282 Pushed,
283 Other,
284}
285
286/// An operation used to synchronize this buffer with its other replicas.
287#[derive(Clone, Debug, PartialEq)]
288pub enum Operation {
289 /// A text operation.
290 Buffer(text::Operation),
291
292 /// An update to the buffer's diagnostics.
293 UpdateDiagnostics {
294 /// The id of the language server that produced the new diagnostics.
295 server_id: LanguageServerId,
296 /// The diagnostics.
297 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
298 /// The buffer's lamport timestamp.
299 lamport_timestamp: clock::Lamport,
300 },
301
302 /// An update to the most recent selections in this buffer.
303 UpdateSelections {
304 /// The selections.
305 selections: Arc<[Selection<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 /// Whether the selections are in 'line mode'.
309 line_mode: bool,
310 /// The [`CursorShape`] associated with these selections.
311 cursor_shape: CursorShape,
312 },
313
314 /// An update to the characters that should trigger autocompletion
315 /// for this buffer.
316 UpdateCompletionTriggers {
317 /// The characters that trigger autocompletion.
318 triggers: Vec<String>,
319 /// The buffer's lamport timestamp.
320 lamport_timestamp: clock::Lamport,
321 /// The language server ID.
322 server_id: LanguageServerId,
323 },
324
325 /// An update to the line ending type of this buffer.
326 UpdateLineEnding {
327 /// The line ending type.
328 line_ending: LineEnding,
329 /// The buffer's lamport timestamp.
330 lamport_timestamp: clock::Lamport,
331 },
332}
333
334/// An event that occurs in a buffer.
335#[derive(Clone, Debug, PartialEq)]
336pub enum BufferEvent {
337 /// The buffer was changed in a way that must be
338 /// propagated to its other replicas.
339 Operation {
340 operation: Operation,
341 is_local: bool,
342 },
343 /// The buffer was edited.
344 Edited,
345 /// The buffer's `dirty` bit changed.
346 DirtyChanged,
347 /// The buffer was saved.
348 Saved,
349 /// The buffer's file was changed on disk.
350 FileHandleChanged,
351 /// The buffer was reloaded.
352 Reloaded,
    /// The buffer needs to be reloaded.
354 ReloadNeeded,
355 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer had no language before but has one now.
357 LanguageChanged(bool),
358 /// The buffer's syntax trees were updated.
359 Reparsed,
360 /// The buffer's diagnostics were updated.
361 DiagnosticsUpdated,
362 /// The buffer gained or lost editing capabilities.
363 CapabilityChanged,
364}
365
366/// The file associated with a buffer.
367pub trait File: Send + Sync + Any {
368 /// Returns the [`LocalFile`] associated with this file, if the
369 /// file is local.
370 fn as_local(&self) -> Option<&dyn LocalFile>;
371
372 /// Returns whether this file is local.
373 fn is_local(&self) -> bool {
374 self.as_local().is_some()
375 }
376
377 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
378 /// only available in some states, such as modification time.
379 fn disk_state(&self) -> DiskState;
380
381 /// Returns the path of this file relative to the worktree's root directory.
382 fn path(&self) -> &Arc<RelPath>;
383
384 /// Returns the path of this file relative to the worktree's parent directory (this means it
385 /// includes the name of the worktree's root folder).
386 fn full_path(&self, cx: &App) -> PathBuf;
387
388 /// Returns the path style of this file.
389 fn path_style(&self, cx: &App) -> PathStyle;
390
391 /// Returns the last component of this handle's absolute path. If this handle refers to the root
392 /// of its worktree, then this method will return the name of the worktree itself.
393 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
394
395 /// Returns the id of the worktree to which this file belongs.
396 ///
397 /// This is needed for looking up project-specific settings.
398 fn worktree_id(&self, cx: &App) -> WorktreeId;
399
400 /// Converts this file into a protobuf message.
401 fn to_proto(&self, cx: &App) -> rpc::proto::File;
402
    /// Returns whether Zed considers this to be a private file.
404 fn is_private(&self) -> bool;
405}
406
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for
/// new files.
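/// A sketch of how a caller might branch on the three states (illustrative only, not a doctest):
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => { /* never saved; no deletion indicator */ }
///     DiskState::Present { mtime } => { /* compare `mtime` against the buffer's saved mtime */ }
///     DiskState::Deleted => { /* previously saved file is gone; show a deletion indicator */ }
/// }
/// ```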
411#[derive(Copy, Clone, Debug, PartialEq)]
412pub enum DiskState {
413 /// File created in Zed that has not been saved.
414 New,
415 /// File present on the filesystem.
416 Present { mtime: MTime },
417 /// Deleted file that was previously present.
418 Deleted,
419}
420
421impl DiskState {
422 /// Returns the file's last known modification time on disk.
423 pub fn mtime(self) -> Option<MTime> {
424 match self {
425 DiskState::New => None,
426 DiskState::Present { mtime } => Some(mtime),
427 DiskState::Deleted => None,
428 }
429 }
430
431 pub fn exists(&self) -> bool {
432 match self {
433 DiskState::New => false,
434 DiskState::Present { .. } => true,
435 DiskState::Deleted => false,
436 }
437 }
438}
439
440/// The file associated with a buffer, in the case where the file is on the local disk.
441pub trait LocalFile: File {
    /// Returns the absolute path of this file.
443 fn abs_path(&self, cx: &App) -> PathBuf;
444
445 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
446 fn load(&self, cx: &App) -> Task<Result<String>>;
447
448 /// Loads the file's contents from disk.
449 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
450}
451
452/// The auto-indent behavior associated with an editing operation.
453/// For some editing operations, each affected line of text has its
454/// indentation recomputed. For other operations, the entire block
455/// of edited text is adjusted uniformly.
456#[derive(Clone, Debug)]
457pub enum AutoindentMode {
458 /// Indent each line of inserted text.
459 EachLine,
460 /// Apply the same indentation adjustment to all of the lines
461 /// in a given insertion.
462 Block {
463 /// The original indentation column of the first line of each
464 /// insertion, if it has been copied.
465 ///
466 /// Knowing this makes it possible to preserve the relative indentation
467 /// of every line in the insertion from when it was copied.
468 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// has its indentation adjusted by the same amount, `b - a`.
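        ///
        /// As a hypothetical example: if the copied text's first line was at
        /// column 4 (`a = 4`) and it is auto-indented to column 8 (`b = 8`),
        /// then a line that was originally at column 10 ends up at column
        /// 10 + (8 - 4) = 14.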
472 original_indent_columns: Vec<Option<u32>>,
473 },
474}
475
476#[derive(Clone)]
477struct AutoindentRequest {
478 before_edit: BufferSnapshot,
479 entries: Vec<AutoindentRequestEntry>,
480 is_block_mode: bool,
481 ignore_empty_lines: bool,
482}
483
484#[derive(Debug, Clone)]
485struct AutoindentRequestEntry {
486 /// A range of the buffer whose indentation should be adjusted.
487 range: Range<Anchor>,
488 /// Whether or not these lines should be considered brand new, for the
489 /// purpose of auto-indent. When text is not new, its indentation will
490 /// only be adjusted if the suggested indentation level has *changed*
491 /// since the edit was made.
492 first_line_is_new: bool,
493 indent_size: IndentSize,
494 original_indent_column: Option<u32>,
495}
496
497#[derive(Debug)]
498struct IndentSuggestion {
499 basis_row: u32,
500 delta: Ordering,
501 within_error: bool,
502}
503
504struct BufferChunkHighlights<'a> {
505 captures: SyntaxMapCaptures<'a>,
506 next_capture: Option<SyntaxMapCapture<'a>>,
507 stack: Vec<(usize, HighlightId)>,
508 highlight_maps: Vec<HighlightMap>,
509}
510
511/// An iterator that yields chunks of a buffer's text, along with their
512/// syntax highlights and diagnostic status.
513pub struct BufferChunks<'a> {
514 buffer_snapshot: Option<&'a BufferSnapshot>,
515 range: Range<usize>,
516 chunks: text::Chunks<'a>,
517 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
518 error_depth: usize,
519 warning_depth: usize,
520 information_depth: usize,
521 hint_depth: usize,
522 unnecessary_depth: usize,
523 underline: bool,
524 highlights: Option<BufferChunkHighlights<'a>>,
525}
526
527/// A chunk of a buffer's text, along with its syntax highlight and
528/// diagnostic status.
529#[derive(Clone, Debug, Default)]
530pub struct Chunk<'a> {
531 /// The text of the chunk.
532 pub text: &'a str,
533 /// The syntax highlighting style of the chunk.
534 pub syntax_highlight_id: Option<HighlightId>,
535 /// The highlight style that has been applied to this chunk in
536 /// the editor.
537 pub highlight_style: Option<HighlightStyle>,
538 /// The severity of diagnostic associated with this chunk, if any.
539 pub diagnostic_severity: Option<DiagnosticSeverity>,
540 /// A bitset of which characters are tabs in this string.
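    ///
    /// A rough sketch of reading the bitset, assuming bit `i` corresponds to
    /// position `i` within `text` (illustrative only, not a doctest):
    /// ```ignore
    /// fn is_tab_at(chunk: &Chunk, i: usize) -> bool {
    ///     chunk.tabs & (1u128 << i) != 0
    /// }
    /// ```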
541 pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
543 pub chars: u128,
544 /// Whether this chunk of text is marked as unnecessary.
545 pub is_unnecessary: bool,
546 /// Whether this chunk of text was originally a tab character.
547 pub is_tab: bool,
548 /// Whether this chunk of text was originally an inlay.
549 pub is_inlay: bool,
550 /// Whether to underline the corresponding text range in the editor.
551 pub underline: bool,
552}
553
554/// A set of edits to a given version of a buffer, computed asynchronously.
555#[derive(Debug)]
556pub struct Diff {
557 pub base_version: clock::Global,
558 pub line_ending: LineEnding,
559 pub edits: Vec<(Range<usize>, Arc<str>)>,
560}
561
562#[derive(Debug, Clone, Copy)]
563pub(crate) struct DiagnosticEndpoint {
564 offset: usize,
565 is_start: bool,
566 underline: bool,
567 severity: DiagnosticSeverity,
568 is_unnecessary: bool,
569}
570
571/// A class of characters, used for characterizing a run of text.
572#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
573pub enum CharKind {
574 /// Whitespace.
575 Whitespace,
576 /// Punctuation.
577 Punctuation,
578 /// Word.
579 Word,
580}
581
582/// Context for character classification within a specific scope.
583#[derive(Copy, Clone, Eq, PartialEq, Debug)]
584pub enum CharScopeContext {
585 /// Character classification for completion queries.
586 ///
587 /// This context treats certain characters as word constituents that would
588 /// normally be considered punctuation, such as '-' in Tailwind classes
589 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
590 Completion,
591 /// Character classification for linked edits.
592 ///
593 /// This context handles characters that should be treated as part of
594 /// identifiers during linked editing operations, such as '.' in JSX
595 /// component names like `<Animated.View>`.
596 LinkedEdit,
597}
598
/// A runnable is the set of data about a buffer region that can be resolved into a task.
600pub struct Runnable {
601 pub tags: SmallVec<[RunnableTag; 1]>,
602 pub language: Arc<Language>,
603 pub buffer: BufferId,
604}
605
606#[derive(Default, Clone, Debug)]
607pub struct HighlightedText {
608 pub text: SharedString,
609 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
610}
611
612#[derive(Default, Debug)]
613struct HighlightedTextBuilder {
614 pub text: String,
615 highlights: Vec<(Range<usize>, HighlightStyle)>,
616}
617
618impl HighlightedText {
619 pub fn from_buffer_range<T: ToOffset>(
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) -> Self {
626 let mut highlighted_text = HighlightedTextBuilder::default();
627 highlighted_text.add_text_from_buffer_range(
628 range,
629 snapshot,
630 syntax_snapshot,
631 override_style,
632 syntax_theme,
633 );
634 highlighted_text.build()
635 }
636
637 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
638 gpui::StyledText::new(self.text.clone())
639 .with_default_highlights(default_style, self.highlights.iter().cloned())
640 }
641
    /// Returns the first line, with leading whitespace trimmed (but never past the
    /// start of the first highlight), along with a boolean indicating whether more
    /// lines follow.
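    ///
    /// A rough sketch of the expected behavior with hypothetical input and no
    /// highlights (illustrative only, not a doctest):
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more); // a second line follows
    /// ```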
644 pub fn first_line_preview(self) -> (Self, bool) {
645 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
646 let first_line = &self.text[..newline_ix];
647
648 // Trim leading whitespace, unless an edit starts prior to it.
649 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
650 if let Some((first_highlight_range, _)) = self.highlights.first() {
651 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
652 }
653
654 let preview_text = &first_line[preview_start_ix..];
655 let preview_highlights = self
656 .highlights
657 .into_iter()
658 .skip_while(|(range, _)| range.end <= preview_start_ix)
659 .take_while(|(range, _)| range.start < newline_ix)
660 .filter_map(|(mut range, highlight)| {
661 range.start = range.start.saturating_sub(preview_start_ix);
662 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
663 if range.is_empty() {
664 None
665 } else {
666 Some((range, highlight))
667 }
668 });
669
670 let preview = Self {
671 text: SharedString::new(preview_text),
672 highlights: preview_highlights.collect(),
673 };
674
675 (preview, self.text.len() > newline_ix)
676 }
677}
678
679impl HighlightedTextBuilder {
680 pub fn build(self) -> HighlightedText {
681 HighlightedText {
682 text: self.text.into(),
683 highlights: self.highlights,
684 }
685 }
686
687 pub fn add_text_from_buffer_range<T: ToOffset>(
688 &mut self,
689 range: Range<T>,
690 snapshot: &text::BufferSnapshot,
691 syntax_snapshot: &SyntaxSnapshot,
692 override_style: Option<HighlightStyle>,
693 syntax_theme: &SyntaxTheme,
694 ) {
695 let range = range.to_offset(snapshot);
696 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
697 let start = self.text.len();
698 self.text.push_str(chunk.text);
699 let end = self.text.len();
700
701 if let Some(highlight_style) = chunk
702 .syntax_highlight_id
703 .and_then(|id| id.style(syntax_theme))
704 {
705 let highlight_style = override_style.map_or(highlight_style, |override_style| {
706 highlight_style.highlight(override_style)
707 });
708 self.highlights.push((start..end, highlight_style));
709 } else if let Some(override_style) = override_style {
710 self.highlights.push((start..end, override_style));
711 }
712 }
713 }
714
715 fn highlighted_chunks<'a>(
716 range: Range<usize>,
717 snapshot: &'a text::BufferSnapshot,
718 syntax_snapshot: &'a SyntaxSnapshot,
719 ) -> BufferChunks<'a> {
720 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
721 grammar
722 .highlights_config
723 .as_ref()
724 .map(|config| &config.query)
725 });
726
727 let highlight_maps = captures
728 .grammars()
729 .iter()
730 .map(|grammar| grammar.highlight_map())
731 .collect();
732
733 BufferChunks::new(
734 snapshot.as_rope(),
735 range,
736 Some((captures, highlight_maps)),
737 false,
738 None,
739 )
740 }
741}
742
743#[derive(Clone)]
744pub struct EditPreview {
745 old_snapshot: text::BufferSnapshot,
746 applied_edits_snapshot: text::BufferSnapshot,
747 syntax_snapshot: SyntaxSnapshot,
748}
749
750impl EditPreview {
751 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
752 let (first, _) = edits.first()?;
753 let (last, _) = edits.last()?;
754
755 let start = first.start.to_point(&self.old_snapshot);
756 let old_end = last.end.to_point(&self.old_snapshot);
757 let new_end = last
758 .end
759 .bias_right(&self.old_snapshot)
760 .to_point(&self.applied_edits_snapshot);
761
762 let start = Point::new(start.row.saturating_sub(3), 0);
763 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
764 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
765
766 Some(unified_diff(
767 &self
768 .old_snapshot
769 .text_for_range(start..old_end)
770 .collect::<String>(),
771 &self
772 .applied_edits_snapshot
773 .text_for_range(start..new_end)
774 .collect::<String>(),
775 ))
776 }
777
778 pub fn highlight_edits(
779 &self,
780 current_snapshot: &BufferSnapshot,
781 edits: &[(Range<Anchor>, impl AsRef<str>)],
782 include_deletions: bool,
783 cx: &App,
784 ) -> HighlightedText {
785 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
786 return HighlightedText::default();
787 };
788
789 let mut highlighted_text = HighlightedTextBuilder::default();
790
791 let visible_range_in_preview_snapshot =
792 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
793 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
794
795 let insertion_highlight_style = HighlightStyle {
796 background_color: Some(cx.theme().status().created_background),
797 ..Default::default()
798 };
799 let deletion_highlight_style = HighlightStyle {
800 background_color: Some(cx.theme().status().deleted_background),
801 ..Default::default()
802 };
803 let syntax_theme = cx.theme().syntax();
804
805 for (range, edit_text) in edits {
806 let edit_new_end_in_preview_snapshot = range
807 .end
808 .bias_right(&self.old_snapshot)
809 .to_offset(&self.applied_edits_snapshot);
810 let edit_start_in_preview_snapshot =
811 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
812
813 let unchanged_range_in_preview_snapshot =
814 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
815 if !unchanged_range_in_preview_snapshot.is_empty() {
816 highlighted_text.add_text_from_buffer_range(
817 unchanged_range_in_preview_snapshot,
818 &self.applied_edits_snapshot,
819 &self.syntax_snapshot,
820 None,
821 syntax_theme,
822 );
823 }
824
825 let range_in_current_snapshot = range.to_offset(current_snapshot);
826 if include_deletions && !range_in_current_snapshot.is_empty() {
827 highlighted_text.add_text_from_buffer_range(
828 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
831 Some(deletion_highlight_style),
832 syntax_theme,
833 );
834 }
835
836 if !edit_text.as_ref().is_empty() {
837 highlighted_text.add_text_from_buffer_range(
838 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
839 &self.applied_edits_snapshot,
840 &self.syntax_snapshot,
841 Some(insertion_highlight_style),
842 syntax_theme,
843 );
844 }
845
846 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
847 }
848
849 highlighted_text.add_text_from_buffer_range(
850 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
851 &self.applied_edits_snapshot,
852 &self.syntax_snapshot,
853 None,
854 syntax_theme,
855 );
856
857 highlighted_text.build()
858 }
859
860 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
861 cx.new(|cx| {
862 let mut buffer = Buffer::local_normalized(
863 self.applied_edits_snapshot.as_rope().clone(),
864 self.applied_edits_snapshot.line_ending(),
865 cx,
866 );
867 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
868 buffer
869 })
870 }
871
872 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
873 let (first, _) = edits.first()?;
874 let (last, _) = edits.last()?;
875
876 let start = first
877 .start
878 .bias_left(&self.old_snapshot)
879 .to_point(&self.applied_edits_snapshot);
880 let end = last
881 .end
882 .bias_right(&self.old_snapshot)
883 .to_point(&self.applied_edits_snapshot);
884
885 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
886 let range = Point::new(start.row, 0)
887 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
888
889 Some(range)
890 }
891}
892
893#[derive(Clone, Debug, PartialEq, Eq)]
894pub struct BracketMatch<T> {
895 pub open_range: Range<T>,
896 pub close_range: Range<T>,
897 pub newline_only: bool,
898 pub syntax_layer_depth: usize,
899 pub color_index: Option<usize>,
900}
901
902impl<T> BracketMatch<T> {
903 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
904 (self.open_range, self.close_range)
905 }
906}
907
908impl Buffer {
909 /// Create a new buffer with the given base text.
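    ///
    /// A minimal usage sketch, assuming a gpui `App`/test context (illustrative
    /// only, not a doctest):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// assert_eq!(buffer.read(cx).text(), "fn main() {}\n");
    /// ```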
910 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
911 Self::build(
912 TextBuffer::new(
913 ReplicaId::LOCAL,
914 cx.entity_id().as_non_zero_u64().into(),
915 base_text.into(),
916 ),
917 None,
918 Capability::ReadWrite,
919 )
920 }
921
922 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
923 pub fn local_normalized(
924 base_text_normalized: Rope,
925 line_ending: LineEnding,
926 cx: &Context<Self>,
927 ) -> Self {
928 Self::build(
929 TextBuffer::new_normalized(
930 ReplicaId::LOCAL,
931 cx.entity_id().as_non_zero_u64().into(),
932 line_ending,
933 base_text_normalized,
934 ),
935 None,
936 Capability::ReadWrite,
937 )
938 }
939
940 /// Create a new buffer that is a replica of a remote buffer.
941 pub fn remote(
942 remote_id: BufferId,
943 replica_id: ReplicaId,
944 capability: Capability,
945 base_text: impl Into<String>,
946 ) -> Self {
947 Self::build(
948 TextBuffer::new(replica_id, remote_id, base_text.into()),
949 None,
950 capability,
951 )
952 }
953
954 /// Create a new buffer that is a replica of a remote buffer, populating its
955 /// state from the given protobuf message.
956 pub fn from_proto(
957 replica_id: ReplicaId,
958 capability: Capability,
959 message: proto::BufferState,
960 file: Option<Arc<dyn File>>,
961 ) -> Result<Self> {
962 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
963 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
964 let mut this = Self::build(buffer, file, capability);
965 this.text.set_line_ending(proto::deserialize_line_ending(
966 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
967 ));
968 this.saved_version = proto::deserialize_version(&message.saved_version);
969 this.saved_mtime = message.saved_mtime.map(|time| time.into());
970 Ok(this)
971 }
972
973 /// Serialize the buffer's state to a protobuf message.
974 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
975 proto::BufferState {
976 id: self.remote_id().into(),
977 file: self.file.as_ref().map(|f| f.to_proto(cx)),
978 base_text: self.base_text().to_string(),
979 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
980 saved_version: proto::serialize_version(&self.saved_version),
981 saved_mtime: self.saved_mtime.map(|time| time.into()),
982 }
983 }
984
985 /// Serialize as protobufs all of the changes to the buffer since the given version.
986 pub fn serialize_ops(
987 &self,
988 since: Option<clock::Global>,
989 cx: &App,
990 ) -> Task<Vec<proto::Operation>> {
991 let mut operations = Vec::new();
992 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
993
994 operations.extend(self.remote_selections.iter().map(|(_, set)| {
995 proto::serialize_operation(&Operation::UpdateSelections {
996 selections: set.selections.clone(),
997 lamport_timestamp: set.lamport_timestamp,
998 line_mode: set.line_mode,
999 cursor_shape: set.cursor_shape,
1000 })
1001 }));
1002
1003 for (server_id, diagnostics) in &self.diagnostics {
1004 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1005 lamport_timestamp: self.diagnostics_timestamp,
1006 server_id: *server_id,
1007 diagnostics: diagnostics.iter().cloned().collect(),
1008 }));
1009 }
1010
1011 for (server_id, completions) in &self.completion_triggers_per_language_server {
1012 operations.push(proto::serialize_operation(
1013 &Operation::UpdateCompletionTriggers {
1014 triggers: completions.iter().cloned().collect(),
1015 lamport_timestamp: self.completion_triggers_timestamp,
1016 server_id: *server_id,
1017 },
1018 ));
1019 }
1020
1021 let text_operations = self.text.operations().clone();
1022 cx.background_spawn(async move {
1023 let since = since.unwrap_or_default();
1024 operations.extend(
1025 text_operations
1026 .iter()
1027 .filter(|(_, op)| !since.observed(op.timestamp()))
1028 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1029 );
1030 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1031 operations
1032 })
1033 }
1034
1035 /// Assign a language to the buffer, returning the buffer.
1036 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1037 self.set_language_async(Some(language), cx);
1038 self
1039 }
1040
1041 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1042 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1043 self.set_language(Some(language), cx);
1044 self
1045 }
1046
1047 /// Returns the [`Capability`] of this buffer.
1048 pub fn capability(&self) -> Capability {
1049 self.capability
1050 }
1051
1052 /// Whether this buffer can only be read.
1053 pub fn read_only(&self) -> bool {
1054 self.capability == Capability::ReadOnly
1055 }
1056
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1058 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1059 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1060 let snapshot = buffer.snapshot();
1061 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1062 let tree_sitter_data = TreeSitterData::new(snapshot);
1063 Self {
1064 saved_mtime,
1065 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1066 saved_version: buffer.version(),
1067 preview_version: buffer.version(),
1068 reload_task: None,
1069 transaction_depth: 0,
1070 was_dirty_before_starting_transaction: None,
1071 has_unsaved_edits: Cell::new((buffer.version(), false)),
1072 text: buffer,
1073 branch_state: None,
1074 file,
1075 capability,
1076 syntax_map,
1077 reparse: None,
1078 non_text_state_update_count: 0,
1079 sync_parse_timeout: Duration::from_millis(1),
1080 parse_status: watch::channel(ParseStatus::Idle),
1081 autoindent_requests: Default::default(),
1082 wait_for_autoindent_txs: Default::default(),
1083 pending_autoindent: Default::default(),
1084 language: None,
1085 remote_selections: Default::default(),
1086 diagnostics: Default::default(),
1087 diagnostics_timestamp: Lamport::MIN,
1088 completion_triggers: Default::default(),
1089 completion_triggers_per_language_server: Default::default(),
1090 completion_triggers_timestamp: Lamport::MIN,
1091 deferred_ops: OperationQueue::new(),
1092 has_conflict: false,
1093 change_bits: Default::default(),
1094 _subscriptions: Vec::new(),
1095 }
1096 }
1097
1098 pub fn build_snapshot(
1099 text: Rope,
1100 language: Option<Arc<Language>>,
1101 language_registry: Option<Arc<LanguageRegistry>>,
1102 cx: &mut App,
1103 ) -> impl Future<Output = BufferSnapshot> + use<> {
1104 let entity_id = cx.reserve_entity::<Self>().entity_id();
1105 let buffer_id = entity_id.as_non_zero_u64().into();
1106 async move {
1107 let text =
1108 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1109 .snapshot();
1110 let mut syntax = SyntaxMap::new(&text).snapshot();
1111 if let Some(language) = language.clone() {
1112 let language_registry = language_registry.clone();
1113 syntax.reparse(&text, language_registry, language);
1114 }
1115 let tree_sitter_data = TreeSitterData::new(text.clone());
1116 BufferSnapshot {
1117 text,
1118 syntax,
1119 file: None,
1120 diagnostics: Default::default(),
1121 remote_selections: Default::default(),
1122 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1123 language,
1124 non_text_state_update_count: 0,
1125 }
1126 }
1127 }
1128
1129 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1130 let entity_id = cx.reserve_entity::<Self>().entity_id();
1131 let buffer_id = entity_id.as_non_zero_u64().into();
1132 let text = TextBuffer::new_normalized(
1133 ReplicaId::LOCAL,
1134 buffer_id,
1135 Default::default(),
1136 Rope::new(),
1137 )
1138 .snapshot();
1139 let syntax = SyntaxMap::new(&text).snapshot();
1140 let tree_sitter_data = TreeSitterData::new(text.clone());
1141 BufferSnapshot {
1142 text,
1143 syntax,
1144 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1145 file: None,
1146 diagnostics: Default::default(),
1147 remote_selections: Default::default(),
1148 language: None,
1149 non_text_state_update_count: 0,
1150 }
1151 }
1152
1153 #[cfg(any(test, feature = "test-support"))]
1154 pub fn build_snapshot_sync(
1155 text: Rope,
1156 language: Option<Arc<Language>>,
1157 language_registry: Option<Arc<LanguageRegistry>>,
1158 cx: &mut App,
1159 ) -> BufferSnapshot {
1160 let entity_id = cx.reserve_entity::<Self>().entity_id();
1161 let buffer_id = entity_id.as_non_zero_u64().into();
1162 let text =
1163 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1164 .snapshot();
1165 let mut syntax = SyntaxMap::new(&text).snapshot();
1166 if let Some(language) = language.clone() {
1167 syntax.reparse(&text, language_registry, language);
1168 }
1169 let tree_sitter_data = TreeSitterData::new(text.clone());
1170 BufferSnapshot {
1171 text,
1172 syntax,
1173 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1174 file: None,
1175 diagnostics: Default::default(),
1176 remote_selections: Default::default(),
1177 language,
1178 non_text_state_update_count: 0,
1179 }
1180 }
1181
1182 /// Retrieve a snapshot of the buffer's current state. This is computationally
1183 /// cheap, and allows reading from the buffer on a background thread.
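    ///
    /// A rough sketch of reading from a snapshot off the main thread, assuming a
    /// gpui context (illustrative only, not a doctest):
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read freely here.
    ///     let last_row = snapshot.max_point().row;
    /// })
    /// .detach();
    /// ```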
1184 pub fn snapshot(&self) -> BufferSnapshot {
1185 let text = self.text.snapshot();
1186 let mut syntax_map = self.syntax_map.lock();
1187 syntax_map.interpolate(&text);
1188 let syntax = syntax_map.snapshot();
1189
1190 BufferSnapshot {
1191 text,
1192 syntax,
1193 tree_sitter_data: self.tree_sitter_data.clone(),
1194 file: self.file.clone(),
1195 remote_selections: self.remote_selections.clone(),
1196 diagnostics: self.diagnostics.clone(),
1197 language: self.language.clone(),
1198 non_text_state_update_count: self.non_text_state_update_count,
1199 }
1200 }
1201
1202 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1203 let this = cx.entity();
1204 cx.new(|cx| {
1205 let mut branch = Self {
1206 branch_state: Some(BufferBranchState {
1207 base_buffer: this.clone(),
1208 merged_operations: Default::default(),
1209 }),
1210 language: self.language.clone(),
1211 has_conflict: self.has_conflict,
1212 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1213 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1214 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1215 };
1216 if let Some(language_registry) = self.language_registry() {
1217 branch.set_language_registry(language_registry);
1218 }
1219
1220 // Reparse the branch buffer so that we get syntax highlighting immediately.
1221 branch.reparse(cx, true);
1222
1223 branch
1224 })
1225 }
1226
1227 pub fn preview_edits(
1228 &self,
1229 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1230 cx: &App,
1231 ) -> Task<EditPreview> {
1232 let registry = self.language_registry();
1233 let language = self.language().cloned();
1234 let old_snapshot = self.text.snapshot();
1235 let mut branch_buffer = self.text.branch();
1236 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1237 cx.background_spawn(async move {
1238 if !edits.is_empty() {
1239 if let Some(language) = language.clone() {
1240 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1241 }
1242
1243 branch_buffer.edit(edits.iter().cloned());
1244 let snapshot = branch_buffer.snapshot();
1245 syntax_snapshot.interpolate(&snapshot);
1246
1247 if let Some(language) = language {
1248 syntax_snapshot.reparse(&snapshot, registry, language);
1249 }
1250 }
1251 EditPreview {
1252 old_snapshot,
1253 applied_edits_snapshot: branch_buffer.snapshot(),
1254 syntax_snapshot,
1255 }
1256 })
1257 }
1258
1259 /// Applies all of the changes in this buffer that intersect any of the
1260 /// given `ranges` to its base buffer.
1261 ///
1262 /// If `ranges` is empty, then all changes will be applied. This buffer must
1263 /// be a branch buffer to call this method.
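    ///
    /// A rough usage sketch, assuming a gpui context (illustrative only, not a
    /// doctest):
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // Passing an empty `ranges` vec applies every change to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```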
1264 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1265 let Some(base_buffer) = self.base_buffer() else {
1266 debug_panic!("not a branch buffer");
1267 return;
1268 };
1269
1270 let mut ranges = if ranges.is_empty() {
1271 &[0..usize::MAX]
1272 } else {
1273 ranges.as_slice()
1274 }
1275 .iter()
1276 .peekable();
1277
1278 let mut edits = Vec::new();
1279 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1280 let mut is_included = false;
1281 while let Some(range) = ranges.peek() {
1282 if range.end < edit.new.start {
1283 ranges.next().unwrap();
1284 } else {
1285 if range.start <= edit.new.end {
1286 is_included = true;
1287 }
1288 break;
1289 }
1290 }
1291
1292 if is_included {
1293 edits.push((
1294 edit.old.clone(),
1295 self.text_for_range(edit.new.clone()).collect::<String>(),
1296 ));
1297 }
1298 }
1299
1300 let operation = base_buffer.update(cx, |base_buffer, cx| {
1301 // cx.emit(BufferEvent::DiffBaseChanged);
1302 base_buffer.edit(edits, None, cx)
1303 });
1304
1305 if let Some(operation) = operation
1306 && let Some(BufferBranchState {
1307 merged_operations, ..
1308 }) = &mut self.branch_state
1309 {
1310 merged_operations.push(operation);
1311 }
1312 }
1313
1314 fn on_base_buffer_event(
1315 &mut self,
1316 _: Entity<Buffer>,
1317 event: &BufferEvent,
1318 cx: &mut Context<Self>,
1319 ) {
1320 let BufferEvent::Operation { operation, .. } = event else {
1321 return;
1322 };
1323 let Some(BufferBranchState {
1324 merged_operations, ..
1325 }) = &mut self.branch_state
1326 else {
1327 return;
1328 };
1329
1330 let mut operation_to_undo = None;
1331 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1332 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1333 {
1334 merged_operations.remove(ix);
1335 operation_to_undo = Some(operation.timestamp);
1336 }
1337
1338 self.apply_ops([operation.clone()], cx);
1339
1340 if let Some(timestamp) = operation_to_undo {
1341 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1342 self.undo_operations(counts, cx);
1343 }
1344 }
1345
1346 #[cfg(test)]
1347 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1348 &self.text
1349 }
1350
1351 /// Retrieve a snapshot of the buffer's raw text, without any
1352 /// language-related state like the syntax tree or diagnostics.
1353 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1354 self.text.snapshot()
1355 }
1356
1357 /// The file associated with the buffer, if any.
1358 pub fn file(&self) -> Option<&Arc<dyn File>> {
1359 self.file.as_ref()
1360 }
1361
1362 /// The version of the buffer that was last saved or reloaded from disk.
1363 pub fn saved_version(&self) -> &clock::Global {
1364 &self.saved_version
1365 }
1366
1367 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1368 pub fn saved_mtime(&self) -> Option<MTime> {
1369 self.saved_mtime
1370 }
1371
1372 /// Assign a language to the buffer.
1373 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1374 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1375 }
1376
1377 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1378 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1379 self.set_language_(language, true, cx);
1380 }
1381
1382 fn set_language_(
1383 &mut self,
1384 language: Option<Arc<Language>>,
1385 may_block: bool,
1386 cx: &mut Context<Self>,
1387 ) {
1388 self.non_text_state_update_count += 1;
1389 self.syntax_map.lock().clear(&self.text);
1390 let old_language = std::mem::replace(&mut self.language, language);
1391 self.was_changed();
1392 self.reparse(cx, may_block);
1393 let has_fresh_language =
1394 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1395 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1396 }
1397
1398 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1399 /// other languages if parts of the buffer are written in different languages.
1400 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1401 self.syntax_map
1402 .lock()
1403 .set_language_registry(language_registry);
1404 }
1405
1406 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1407 self.syntax_map.lock().language_registry()
1408 }
1409
1410 /// Assign the line ending type to the buffer.
1411 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1412 self.text.set_line_ending(line_ending);
1413
1414 let lamport_timestamp = self.text.lamport_clock.tick();
1415 self.send_operation(
1416 Operation::UpdateLineEnding {
1417 line_ending,
1418 lamport_timestamp,
1419 },
1420 true,
1421 cx,
1422 );
1423 }
1424
1425 /// Assign the buffer a new [`Capability`].
1426 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1427 if self.capability != capability {
1428 self.capability = capability;
1429 cx.emit(BufferEvent::CapabilityChanged)
1430 }
1431 }
1432
1433 /// This method is called to signal that the buffer has been saved.
1434 pub fn did_save(
1435 &mut self,
1436 version: clock::Global,
1437 mtime: Option<MTime>,
1438 cx: &mut Context<Self>,
1439 ) {
1440 self.saved_version = version.clone();
1441 self.has_unsaved_edits.set((version, false));
1442 self.has_conflict = false;
1443 self.saved_mtime = mtime;
1444 self.was_changed();
1445 cx.emit(BufferEvent::Saved);
1446 cx.notify();
1447 }
1448
1449 /// Reloads the contents of the buffer from disk.
1450 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1451 let (tx, rx) = futures::channel::oneshot::channel();
1452 let prev_version = self.text.version();
1453 self.reload_task = Some(cx.spawn(async move |this, cx| {
1454 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1455 let file = this.file.as_ref()?.as_local()?;
1456
1457 Some((file.disk_state().mtime(), file.load(cx)))
1458 })?
1459 else {
1460 return Ok(());
1461 };
1462
1463 let new_text = new_text.await?;
1464 let diff = this
1465 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1466 .await;
1467 this.update(cx, |this, cx| {
1468 if this.version() == diff.base_version {
1469 this.finalize_last_transaction();
1470 this.apply_diff(diff, cx);
1471 tx.send(this.finalize_last_transaction().cloned()).ok();
1472 this.has_conflict = false;
1473 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1474 } else {
1475 if !diff.edits.is_empty()
1476 || this
1477 .edits_since::<usize>(&diff.base_version)
1478 .next()
1479 .is_some()
1480 {
1481 this.has_conflict = true;
1482 }
1483
1484 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1485 }
1486
1487 this.reload_task.take();
1488 })
1489 }));
1490 rx
1491 }
1492
1493 /// This method is called to signal that the buffer has been reloaded.
1494 pub fn did_reload(
1495 &mut self,
1496 version: clock::Global,
1497 line_ending: LineEnding,
1498 mtime: Option<MTime>,
1499 cx: &mut Context<Self>,
1500 ) {
1501 self.saved_version = version;
1502 self.has_unsaved_edits
1503 .set((self.saved_version.clone(), false));
1504 self.text.set_line_ending(line_ending);
1505 self.saved_mtime = mtime;
1506 cx.emit(BufferEvent::Reloaded);
1507 cx.notify();
1508 }
1509
1510 /// Updates the [`File`] backing this buffer. This should be called when
1511 /// the file has changed or has been deleted.
1512 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1513 let was_dirty = self.is_dirty();
1514 let mut file_changed = false;
1515
1516 if let Some(old_file) = self.file.as_ref() {
1517 if new_file.path() != old_file.path() {
1518 file_changed = true;
1519 }
1520
1521 let old_state = old_file.disk_state();
1522 let new_state = new_file.disk_state();
1523 if old_state != new_state {
1524 file_changed = true;
1525 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1526 cx.emit(BufferEvent::ReloadNeeded)
1527 }
1528 }
1529 } else {
1530 file_changed = true;
1531 };
1532
1533 self.file = Some(new_file);
1534 if file_changed {
1535 self.was_changed();
1536 self.non_text_state_update_count += 1;
1537 if was_dirty != self.is_dirty() {
1538 cx.emit(BufferEvent::DirtyChanged);
1539 }
1540 cx.emit(BufferEvent::FileHandleChanged);
1541 cx.notify();
1542 }
1543 }
1544
1545 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1546 Some(self.branch_state.as_ref()?.base_buffer.clone())
1547 }
1548
1549 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1550 pub fn language(&self) -> Option<&Arc<Language>> {
1551 self.language.as_ref()
1552 }
1553
1554 /// Returns the [`Language`] at the given location.
1555 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1556 let offset = position.to_offset(self);
1557 let mut is_first = true;
1558 let start_anchor = self.anchor_before(offset);
1559 let end_anchor = self.anchor_after(offset);
1560 self.syntax_map
1561 .lock()
1562 .layers_for_range(offset..offset, &self.text, false)
1563 .filter(|layer| {
1564 if is_first {
1565 is_first = false;
1566 return true;
1567 }
1568
1569 layer
1570 .included_sub_ranges
1571 .map(|sub_ranges| {
1572 sub_ranges.iter().any(|sub_range| {
1573 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1574 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1575 !is_before_start && !is_after_end
1576 })
1577 })
1578 .unwrap_or(true)
1579 })
1580 .last()
1581 .map(|info| info.language.clone())
1582 .or_else(|| self.language.clone())
1583 }
1584
1585 /// Returns each [`Language`] for the active syntax layers at the given location.
1586 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1587 let offset = position.to_offset(self);
1588 let mut languages: Vec<Arc<Language>> = self
1589 .syntax_map
1590 .lock()
1591 .layers_for_range(offset..offset, &self.text, false)
1592 .map(|info| info.language.clone())
1593 .collect();
1594
1595 if languages.is_empty()
1596 && let Some(buffer_language) = self.language()
1597 {
1598 languages.push(buffer_language.clone());
1599 }
1600
1601 languages
1602 }
1603
1604 /// An integer version number that accounts for all updates besides
1605 /// the buffer's text itself (which is versioned via a version vector).
1606 pub fn non_text_state_update_count(&self) -> usize {
1607 self.non_text_state_update_count
1608 }
1609
1610 /// Whether the buffer is being parsed in the background.
1611 #[cfg(any(test, feature = "test-support"))]
1612 pub fn is_parsing(&self) -> bool {
1613 self.reparse.is_some()
1614 }
1615
1616 /// Indicates whether the buffer contains any regions that may be
1617 /// written in a language that hasn't been loaded yet.
1618 pub fn contains_unknown_injections(&self) -> bool {
1619 self.syntax_map.lock().contains_unknown_injections()
1620 }
1621
1622 #[cfg(any(test, feature = "test-support"))]
1623 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1624 self.sync_parse_timeout = timeout;
1625 }
1626
1627 /// Called after an edit to synchronize the buffer's main parse tree with
1628 /// the buffer's new underlying state.
1629 ///
1630 /// Locks the syntax map and interpolates the edits since the last reparse
1631 /// into the foreground syntax tree.
1632 ///
1633 /// Then takes a stable snapshot of the syntax map before unlocking it.
1634 /// The snapshot with the interpolated edits is sent to a background thread,
1635 /// where we ask Tree-sitter to perform an incremental parse.
1636 ///
    /// Meanwhile, if `may_block` is true, we block the main thread for up to
    /// `sync_parse_timeout` (1ms by default) waiting for the parse to complete,
    /// and proceed synchronously if it finishes in time.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// until the parse completes, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the foreground parse state.
1645 ///
1646 /// If the buffer or grammar changed since the start of the background parse,
1647 /// initiate an additional reparse recursively. To avoid concurrent parses
1648 /// for the same buffer, we only initiate a new parse if we are not already
1649 /// parsing in the background.
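    ///
    /// A rough sketch of explicitly triggering a reparse and waiting for it to
    /// settle, assuming a gpui async test context (illustrative only, not a doctest):
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx, false));
    /// let idle = buffer.read(cx).parsing_idle();
    /// idle.await;
    /// ```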
1650 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1651 if self.reparse.is_some() {
1652 return;
1653 }
1654 let language = if let Some(language) = self.language.clone() {
1655 language
1656 } else {
1657 return;
1658 };
1659
1660 let text = self.text_snapshot();
1661 let parsed_version = self.version();
1662
1663 let mut syntax_map = self.syntax_map.lock();
1664 syntax_map.interpolate(&text);
1665 let language_registry = syntax_map.language_registry();
1666 let mut syntax_snapshot = syntax_map.snapshot();
1667 drop(syntax_map);
1668
1669 let parse_task = cx.background_spawn({
1670 let language = language.clone();
1671 let language_registry = language_registry.clone();
1672 async move {
1673 syntax_snapshot.reparse(&text, language_registry, language);
1674 syntax_snapshot
1675 }
1676 });
1677
1678 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1679 if may_block {
1680 match cx
1681 .background_executor()
1682 .block_with_timeout(self.sync_parse_timeout, parse_task)
1683 {
1684 Ok(new_syntax_snapshot) => {
1685 self.did_finish_parsing(new_syntax_snapshot, cx);
1686 self.reparse = None;
1687 }
1688 Err(parse_task) => {
1689 self.reparse = Some(cx.spawn(async move |this, cx| {
1690 let new_syntax_map = cx.background_spawn(parse_task).await;
1691 this.update(cx, move |this, cx| {
1692 let grammar_changed = || {
1693 this.language.as_ref().is_none_or(|current_language| {
1694 !Arc::ptr_eq(&language, current_language)
1695 })
1696 };
1697 let language_registry_changed = || {
1698 new_syntax_map.contains_unknown_injections()
1699 && language_registry.is_some_and(|registry| {
1700 registry.version()
1701 != new_syntax_map.language_registry_version()
1702 })
1703 };
1704 let parse_again = this.version.changed_since(&parsed_version)
1705 || language_registry_changed()
1706 || grammar_changed();
1707 this.did_finish_parsing(new_syntax_map, cx);
1708 this.reparse = None;
1709 if parse_again {
1710 this.reparse(cx, false);
1711 }
1712 })
1713 .ok();
1714 }));
1715 }
1716 }
1717 } else {
1718 self.reparse = Some(cx.spawn(async move |this, cx| {
1719 let new_syntax_map = cx.background_spawn(parse_task).await;
1720 this.update(cx, move |this, cx| {
1721 let grammar_changed = || {
1722 this.language.as_ref().is_none_or(|current_language| {
1723 !Arc::ptr_eq(&language, current_language)
1724 })
1725 };
1726 let language_registry_changed = || {
1727 new_syntax_map.contains_unknown_injections()
1728 && language_registry.is_some_and(|registry| {
1729 registry.version() != new_syntax_map.language_registry_version()
1730 })
1731 };
1732 let parse_again = this.version.changed_since(&parsed_version)
1733 || language_registry_changed()
1734 || grammar_changed();
1735 this.did_finish_parsing(new_syntax_map, cx);
1736 this.reparse = None;
1737 if parse_again {
1738 this.reparse(cx, false);
1739 }
1740 })
1741 .ok();
1742 }));
1743 }
1744 }
1745
1746 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1747 self.was_changed();
1748 self.non_text_state_update_count += 1;
1749 self.syntax_map.lock().did_parse(syntax_snapshot);
1750 self.request_autoindent(cx);
1751 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1752 self.tree_sitter_data.lock().clear();
1753 cx.emit(BufferEvent::Reparsed);
1754 cx.notify();
1755 }
1756
1757 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1758 self.parse_status.1.clone()
1759 }
1760
    /// Waits until the buffer is no longer parsing.
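    ///
    /// A minimal sketch of waiting for parsing to settle from an async context
    /// (hypothetical setup; assumes `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let idle = buffer.read(cx).parsing_idle();
    /// idle.await;
    /// // Syntax-derived state (highlights, outline, etc.) is now up to date.
    /// ```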
1762 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1763 let mut parse_status = self.parse_status();
1764 async move {
1765 while *parse_status.borrow() != ParseStatus::Idle {
1766 if parse_status.changed().await.is_err() {
1767 break;
1768 }
1769 }
1770 }
1771 }
1772
1773 /// Assign to the buffer a set of diagnostics created by a given language server.
1774 pub fn update_diagnostics(
1775 &mut self,
1776 server_id: LanguageServerId,
1777 diagnostics: DiagnosticSet,
1778 cx: &mut Context<Self>,
1779 ) {
1780 let lamport_timestamp = self.text.lamport_clock.tick();
1781 let op = Operation::UpdateDiagnostics {
1782 server_id,
1783 diagnostics: diagnostics.iter().cloned().collect(),
1784 lamport_timestamp,
1785 };
1786
1787 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1788 self.send_operation(op, true, cx);
1789 }
1790
1791 pub fn buffer_diagnostics(
1792 &self,
1793 for_server: Option<LanguageServerId>,
1794 ) -> Vec<&DiagnosticEntry<Anchor>> {
1795 match for_server {
1796 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1797 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1798 Err(_) => Vec::new(),
1799 },
1800 None => self
1801 .diagnostics
1802 .iter()
1803 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1804 .collect(),
1805 }
1806 }
1807
1808 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1809 if let Some(indent_sizes) = self.compute_autoindents() {
1810 let indent_sizes = cx.background_spawn(indent_sizes);
1811 match cx
1812 .background_executor()
1813 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1814 {
1815 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1816 Err(indent_sizes) => {
1817 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1818 let indent_sizes = indent_sizes.await;
1819 this.update(cx, |this, cx| {
1820 this.apply_autoindents(indent_sizes, cx);
1821 })
1822 .ok();
1823 }));
1824 }
1825 }
1826 } else {
1827 self.autoindent_requests.clear();
1828 for tx in self.wait_for_autoindent_txs.drain(..) {
1829 tx.send(()).ok();
1830 }
1831 }
1832 }
1833
1834 fn compute_autoindents(
1835 &self,
1836 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1837 let max_rows_between_yields = 100;
1838 let snapshot = self.snapshot();
1839 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1840 return None;
1841 }
1842
1843 let autoindent_requests = self.autoindent_requests.clone();
1844 Some(async move {
1845 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1846 for request in autoindent_requests {
1847 // Resolve each edited range to its row in the current buffer and in the
1848 // buffer before this batch of edits.
1849 let mut row_ranges = Vec::new();
1850 let mut old_to_new_rows = BTreeMap::new();
1851 let mut language_indent_sizes_by_new_row = Vec::new();
1852 for entry in &request.entries {
1853 let position = entry.range.start;
1854 let new_row = position.to_point(&snapshot).row;
1855 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1856 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1857
1858 if !entry.first_line_is_new {
1859 let old_row = position.to_point(&request.before_edit).row;
1860 old_to_new_rows.insert(old_row, new_row);
1861 }
1862 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1863 }
1864
1865 // Build a map containing the suggested indentation for each of the edited lines
1866 // with respect to the state of the buffer before these edits. This map is keyed
1867 // by the rows for these lines in the current state of the buffer.
1868 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1869 let old_edited_ranges =
1870 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1871 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1872 let mut language_indent_size = IndentSize::default();
1873 for old_edited_range in old_edited_ranges {
1874 let suggestions = request
1875 .before_edit
1876 .suggest_autoindents(old_edited_range.clone())
1877 .into_iter()
1878 .flatten();
1879 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1880 if let Some(suggestion) = suggestion {
1881 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1882
1883 // Find the indent size based on the language for this row.
1884 while let Some((row, size)) = language_indent_sizes.peek() {
1885 if *row > new_row {
1886 break;
1887 }
1888 language_indent_size = *size;
1889 language_indent_sizes.next();
1890 }
1891
1892 let suggested_indent = old_to_new_rows
1893 .get(&suggestion.basis_row)
1894 .and_then(|from_row| {
1895 Some(old_suggestions.get(from_row).copied()?.0)
1896 })
1897 .unwrap_or_else(|| {
1898 request
1899 .before_edit
1900 .indent_size_for_line(suggestion.basis_row)
1901 })
1902 .with_delta(suggestion.delta, language_indent_size);
1903 old_suggestions
1904 .insert(new_row, (suggested_indent, suggestion.within_error));
1905 }
1906 }
1907 yield_now().await;
1908 }
1909
1910 // Compute new suggestions for each line, but only include them in the result
1911 // if they differ from the old suggestion for that line.
1912 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1913 let mut language_indent_size = IndentSize::default();
1914 for (row_range, original_indent_column) in row_ranges {
1915 let new_edited_row_range = if request.is_block_mode {
1916 row_range.start..row_range.start + 1
1917 } else {
1918 row_range.clone()
1919 };
1920
1921 let suggestions = snapshot
1922 .suggest_autoindents(new_edited_row_range.clone())
1923 .into_iter()
1924 .flatten();
1925 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1926 if let Some(suggestion) = suggestion {
1927 // Find the indent size based on the language for this row.
1928 while let Some((row, size)) = language_indent_sizes.peek() {
1929 if *row > new_row {
1930 break;
1931 }
1932 language_indent_size = *size;
1933 language_indent_sizes.next();
1934 }
1935
1936 let suggested_indent = indent_sizes
1937 .get(&suggestion.basis_row)
1938 .copied()
1939 .map(|e| e.0)
1940 .unwrap_or_else(|| {
1941 snapshot.indent_size_for_line(suggestion.basis_row)
1942 })
1943 .with_delta(suggestion.delta, language_indent_size);
1944
1945 if old_suggestions.get(&new_row).is_none_or(
1946 |(old_indentation, was_within_error)| {
1947 suggested_indent != *old_indentation
1948 && (!suggestion.within_error || *was_within_error)
1949 },
1950 ) {
1951 indent_sizes.insert(
1952 new_row,
1953 (suggested_indent, request.ignore_empty_lines),
1954 );
1955 }
1956 }
1957 }
1958
1959 if let (true, Some(original_indent_column)) =
1960 (request.is_block_mode, original_indent_column)
1961 {
1962 let new_indent =
1963 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1964 *indent
1965 } else {
1966 snapshot.indent_size_for_line(row_range.start)
1967 };
1968 let delta = new_indent.len as i64 - original_indent_column as i64;
1969 if delta != 0 {
1970 for row in row_range.skip(1) {
1971 indent_sizes.entry(row).or_insert_with(|| {
1972 let mut size = snapshot.indent_size_for_line(row);
1973 if size.kind == new_indent.kind {
1974 match delta.cmp(&0) {
1975 Ordering::Greater => size.len += delta as u32,
1976 Ordering::Less => {
1977 size.len = size.len.saturating_sub(-delta as u32)
1978 }
1979 Ordering::Equal => {}
1980 }
1981 }
1982 (size, request.ignore_empty_lines)
1983 });
1984 }
1985 }
1986 }
1987
1988 yield_now().await;
1989 }
1990 }
1991
1992 indent_sizes
1993 .into_iter()
1994 .filter_map(|(row, (indent, ignore_empty_lines))| {
1995 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1996 None
1997 } else {
1998 Some((row, indent))
1999 }
2000 })
2001 .collect()
2002 })
2003 }
2004
2005 fn apply_autoindents(
2006 &mut self,
2007 indent_sizes: BTreeMap<u32, IndentSize>,
2008 cx: &mut Context<Self>,
2009 ) {
2010 self.autoindent_requests.clear();
2011 for tx in self.wait_for_autoindent_txs.drain(..) {
2012 tx.send(()).ok();
2013 }
2014
2015 let edits: Vec<_> = indent_sizes
2016 .into_iter()
2017 .filter_map(|(row, indent_size)| {
2018 let current_size = indent_size_for_line(self, row);
2019 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2020 })
2021 .collect();
2022
2023 let preserve_preview = self.preserve_preview();
2024 self.edit(edits, None, cx);
2025 if preserve_preview {
2026 self.refresh_preview();
2027 }
2028 }
2029
2030 /// Create a minimal edit that will cause the given row to be indented
2031 /// with the given size. After applying this edit, the length of the line
2032 /// will always be at least `new_size.len`.
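    ///
    /// A hedged sketch of the expected behavior, assuming four-space indentation:
    ///
    /// ```ignore
    /// // Grow a two-space indent on row 2 to four spaces by inserting two
    /// // spaces at the start of the line.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     2,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string())));
    /// ```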
2033 pub fn edit_for_indent_size_adjustment(
2034 row: u32,
2035 current_size: IndentSize,
2036 new_size: IndentSize,
2037 ) -> Option<(Range<Point>, String)> {
2038 if new_size.kind == current_size.kind {
        match new_size.len.cmp(&current_size.len) {
2040 Ordering::Greater => {
2041 let point = Point::new(row, 0);
2042 Some((
2043 point..point,
2044 iter::repeat(new_size.char())
2045 .take((new_size.len - current_size.len) as usize)
2046 .collect::<String>(),
2047 ))
2048 }
2049
2050 Ordering::Less => Some((
2051 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2052 String::new(),
2053 )),
2054
2055 Ordering::Equal => None,
2056 }
2057 } else {
2058 Some((
2059 Point::new(row, 0)..Point::new(row, current_size.len),
2060 iter::repeat(new_size.char())
2061 .take(new_size.len as usize)
2062 .collect::<String>(),
2063 ))
2064 }
2065 }
2066
2067 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2068 /// and the given new text.
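    ///
    /// A minimal sketch of the typical round trip, computing the diff in the
    /// background and applying it once it resolves (hypothetical setup; assumes
    /// `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let diff = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```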
2069 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2070 let old_text = self.as_rope().clone();
2071 let base_version = self.version();
2072 cx.background_executor()
2073 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2074 let old_text = old_text.to_string();
2075 let line_ending = LineEnding::detect(&new_text);
2076 LineEnding::normalize(&mut new_text);
2077 let edits = text_diff(&old_text, &new_text);
2078 Diff {
2079 base_version,
2080 line_ending,
2081 edits,
2082 }
2083 })
2084 }
2085
2086 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2088 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2089 let old_text = self.as_rope().clone();
2090 let line_ending = self.line_ending();
2091 let base_version = self.version();
2092 cx.background_spawn(async move {
2093 let ranges = trailing_whitespace_ranges(&old_text);
2094 let empty = Arc::<str>::from("");
2095 Diff {
2096 base_version,
2097 line_ending,
2098 edits: ranges
2099 .into_iter()
2100 .map(|range| (range, empty.clone()))
2101 .collect(),
2102 }
2103 })
2104 }
2105
    /// Ensures that the buffer ends with a single newline character and no
    /// other trailing whitespace. Has no effect if the buffer is empty.
2108 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2109 let len = self.len();
2110 if len == 0 {
2111 return;
2112 }
2113 let mut offset = len;
2114 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2115 let non_whitespace_len = chunk
2116 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2117 .len();
2118 offset -= chunk.len();
2119 offset += non_whitespace_len;
2120 if non_whitespace_len != 0 {
2121 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2122 return;
2123 }
2124 break;
2125 }
2126 }
2127 self.edit([(offset..len, "\n")], None, cx);
2128 }
2129
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
2133 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2134 let snapshot = self.snapshot();
2135 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2136 let mut delta = 0;
2137 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2138 while let Some(edit_since) = edits_since.peek() {
2139 // If the edit occurs after a diff hunk, then it does not
2140 // affect that hunk.
2141 if edit_since.old.start > range.end {
2142 break;
2143 }
2144 // If the edit precedes the diff hunk, then adjust the hunk
2145 // to reflect the edit.
2146 else if edit_since.old.end < range.start {
2147 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2148 edits_since.next();
2149 }
2150 // If the edit intersects a diff hunk, then discard that hunk.
2151 else {
2152 return None;
2153 }
2154 }
2155
2156 let start = (range.start as i64 + delta) as usize;
2157 let end = (range.end as i64 + delta) as usize;
2158 Some((start..end, new_text))
2159 });
2160
2161 self.start_transaction();
2162 self.text.set_line_ending(diff.line_ending);
2163 self.edit(adjusted_edits, None, cx);
2164 self.end_transaction(cx)
2165 }
2166
2167 pub fn has_unsaved_edits(&self) -> bool {
2168 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2169
2170 if last_version == self.version {
2171 self.has_unsaved_edits
2172 .set((last_version, has_unsaved_edits));
2173 return has_unsaved_edits;
2174 }
2175
2176 let has_edits = self.has_edits_since(&self.saved_version);
2177 self.has_unsaved_edits
2178 .set((self.version.clone(), has_edits));
2179 has_edits
2180 }
2181
2182 /// Checks if the buffer has unsaved changes.
2183 pub fn is_dirty(&self) -> bool {
2184 if self.capability == Capability::ReadOnly {
2185 return false;
2186 }
2187 if self.has_conflict {
2188 return true;
2189 }
2190 match self.file.as_ref().map(|f| f.disk_state()) {
2191 Some(DiskState::New) | Some(DiskState::Deleted) => {
2192 !self.is_empty() && self.has_unsaved_edits()
2193 }
2194 _ => self.has_unsaved_edits(),
2195 }
2196 }
2197
2198 /// Marks the buffer as having a conflict regardless of current buffer state.
2199 pub fn set_conflict(&mut self) {
2200 self.has_conflict = true;
2201 }
2202
2203 /// Checks if the buffer and its file have both changed since the buffer
2204 /// was last saved or reloaded.
2205 pub fn has_conflict(&self) -> bool {
2206 if self.has_conflict {
2207 return true;
2208 }
2209 let Some(file) = self.file.as_ref() else {
2210 return false;
2211 };
2212 match file.disk_state() {
2213 DiskState::New => false,
2214 DiskState::Present { mtime } => match self.saved_mtime {
2215 Some(saved_mtime) => {
2216 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2217 }
2218 None => true,
2219 },
2220 DiskState::Deleted => false,
2221 }
2222 }
2223
2224 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2225 pub fn subscribe(&mut self) -> Subscription<usize> {
2226 self.text.subscribe()
2227 }
2228
2229 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2230 ///
2231 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
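    ///
    /// A minimal sketch of polling for changes via a shared flag (hypothetical
    /// setup; assumes `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&changed)));
    /// // ...later, after edits may have been applied:
    /// if changed.replace(false) {
    ///     // Recompute any state derived from the buffer's text.
    /// }
    /// ```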
2233 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2234 if let Err(ix) = self
2235 .change_bits
2236 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2237 {
2238 self.change_bits.insert(ix, bit);
2239 }
2240 }
2241
2242 /// Set the change bit for all "listeners".
2243 fn was_changed(&mut self) {
2244 self.change_bits.retain(|change_bit| {
2245 change_bit
2246 .upgrade()
2247 .inspect(|bit| {
2248 _ = bit.replace(true);
2249 })
2250 .is_some()
2251 });
2252 }
2253
2254 /// Starts a transaction, if one is not already in-progress. When undoing or
2255 /// redoing edits, all of the edits performed within a transaction are undone
2256 /// or redone together.
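    ///
    /// A minimal sketch of grouping multiple edits so they undo together
    /// (hypothetical setup; assumes `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.start_transaction();
    ///     buffer.edit([(0..0, "fn main() {\n")], None, cx);
    ///     buffer.edit([(12..12, "}\n")], None, cx);
    ///     // Undoing now reverts both edits at once.
    ///     buffer.end_transaction(cx);
    /// });
    /// ```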
2257 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2258 self.start_transaction_at(Instant::now())
2259 }
2260
2261 /// Starts a transaction, providing the current time. Subsequent transactions
2262 /// that occur within a short period of time will be grouped together. This
2263 /// is controlled by the buffer's undo grouping duration.
2264 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2265 self.transaction_depth += 1;
2266 if self.was_dirty_before_starting_transaction.is_none() {
2267 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2268 }
2269 self.text.start_transaction_at(now)
2270 }
2271
2272 /// Terminates the current transaction, if this is the outermost transaction.
2273 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2274 self.end_transaction_at(Instant::now(), cx)
2275 }
2276
2277 /// Terminates the current transaction, providing the current time. Subsequent transactions
2278 /// that occur within a short period of time will be grouped together. This
2279 /// is controlled by the buffer's undo grouping duration.
2280 pub fn end_transaction_at(
2281 &mut self,
2282 now: Instant,
2283 cx: &mut Context<Self>,
2284 ) -> Option<TransactionId> {
2285 assert!(self.transaction_depth > 0);
2286 self.transaction_depth -= 1;
2287 let was_dirty = if self.transaction_depth == 0 {
2288 self.was_dirty_before_starting_transaction.take().unwrap()
2289 } else {
2290 false
2291 };
2292 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2293 self.did_edit(&start_version, was_dirty, cx);
2294 Some(transaction_id)
2295 } else {
2296 None
2297 }
2298 }
2299
2300 /// Manually add a transaction to the buffer's undo history.
2301 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2302 self.text.push_transaction(transaction, now);
2303 }
2304
2305 /// Differs from `push_transaction` in that it does not clear the redo
2306 /// stack. Intended to be used to create a parent transaction to merge
2307 /// potential child transactions into.
2308 ///
2309 /// The caller is responsible for removing it from the undo history using
2310 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2311 /// are merged into this transaction, the caller is responsible for ensuring
2312 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2313 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
2316 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2317 self.text.push_empty_transaction(now)
2318 }
2319
2320 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2322 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2323 self.text.finalize_last_transaction()
2324 }
2325
2326 /// Manually group all changes since a given transaction.
2327 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2328 self.text.group_until_transaction(transaction_id);
2329 }
2330
2331 /// Manually remove a transaction from the buffer's undo history
2332 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2333 self.text.forget_transaction(transaction_id)
2334 }
2335
2336 /// Retrieve a transaction from the buffer's undo history
2337 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2338 self.text.get_transaction(transaction_id)
2339 }
2340
2341 /// Manually merge two transactions in the buffer's undo history.
2342 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2343 self.text.merge_transactions(transaction, destination);
2344 }
2345
2346 /// Waits for the buffer to receive operations with the given timestamps.
2347 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2348 &mut self,
2349 edit_ids: It,
2350 ) -> impl Future<Output = Result<()>> + use<It> {
2351 self.text.wait_for_edits(edit_ids)
2352 }
2353
2354 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2355 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2356 &mut self,
2357 anchors: It,
2358 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2359 self.text.wait_for_anchors(anchors)
2360 }
2361
2362 /// Waits for the buffer to receive operations up to the given version.
2363 pub fn wait_for_version(
2364 &mut self,
2365 version: clock::Global,
2366 ) -> impl Future<Output = Result<()>> + use<> {
2367 self.text.wait_for_version(version)
2368 }
2369
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2372 pub fn give_up_waiting(&mut self) {
2373 self.text.give_up_waiting();
2374 }
2375
2376 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2377 let mut rx = None;
2378 if !self.autoindent_requests.is_empty() {
2379 let channel = oneshot::channel();
2380 self.wait_for_autoindent_txs.push(channel.0);
2381 rx = Some(channel.1);
2382 }
2383 rx
2384 }
2385
2386 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2387 pub fn set_active_selections(
2388 &mut self,
2389 selections: Arc<[Selection<Anchor>]>,
2390 line_mode: bool,
2391 cursor_shape: CursorShape,
2392 cx: &mut Context<Self>,
2393 ) {
2394 let lamport_timestamp = self.text.lamport_clock.tick();
2395 self.remote_selections.insert(
2396 self.text.replica_id(),
2397 SelectionSet {
2398 selections: selections.clone(),
2399 lamport_timestamp,
2400 line_mode,
2401 cursor_shape,
2402 },
2403 );
2404 self.send_operation(
2405 Operation::UpdateSelections {
2406 selections,
2407 line_mode,
2408 lamport_timestamp,
2409 cursor_shape,
2410 },
2411 true,
2412 cx,
2413 );
2414 self.non_text_state_update_count += 1;
2415 cx.notify();
2416 }
2417
2418 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2419 /// this replica.
2420 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2421 if self
2422 .remote_selections
2423 .get(&self.text.replica_id())
2424 .is_none_or(|set| !set.selections.is_empty())
2425 {
2426 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2427 }
2428 }
2429
2430 pub fn set_agent_selections(
2431 &mut self,
2432 selections: Arc<[Selection<Anchor>]>,
2433 line_mode: bool,
2434 cursor_shape: CursorShape,
2435 cx: &mut Context<Self>,
2436 ) {
2437 let lamport_timestamp = self.text.lamport_clock.tick();
2438 self.remote_selections.insert(
2439 ReplicaId::AGENT,
2440 SelectionSet {
2441 selections,
2442 lamport_timestamp,
2443 line_mode,
2444 cursor_shape,
2445 },
2446 );
2447 self.non_text_state_update_count += 1;
2448 cx.notify();
2449 }
2450
2451 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2452 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2453 }
2454
2455 /// Replaces the buffer's entire text.
2456 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2457 where
2458 T: Into<Arc<str>>,
2459 {
2460 self.autoindent_requests.clear();
2461 self.edit([(0..self.len(), text)], None, cx)
2462 }
2463
2464 /// Appends the given text to the end of the buffer.
2465 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2466 where
2467 T: Into<Arc<str>>,
2468 {
2469 self.edit([(self.len()..self.len(), text)], None, cx)
2470 }
2471
2472 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2473 /// delete, and a string of text to insert at that location.
2474 ///
2475 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2476 /// request for the edited ranges, which will be processed when the buffer finishes
2477 /// parsing.
2478 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
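    ///
    /// A minimal sketch of a single edit with auto-indentation (hypothetical
    /// setup; assumes `buffer` is an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // Insert a statement at the start of the buffer and re-indent each
    ///     // affected line.
    ///     buffer.edit(
    ///         [(0..0, "let value = compute();\n")],
    ///         Some(AutoindentMode::EachLine),
    ///         cx,
    ///     );
    /// });
    /// ```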
2481 pub fn edit<I, S, T>(
2482 &mut self,
2483 edits_iter: I,
2484 autoindent_mode: Option<AutoindentMode>,
2485 cx: &mut Context<Self>,
2486 ) -> Option<clock::Lamport>
2487 where
2488 I: IntoIterator<Item = (Range<S>, T)>,
2489 S: ToOffset,
2490 T: Into<Arc<str>>,
2491 {
2492 // Skip invalid edits and coalesce contiguous ones.
2493 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2494
2495 for (range, new_text) in edits_iter {
2496 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2497
2498 if range.start > range.end {
2499 mem::swap(&mut range.start, &mut range.end);
2500 }
2501 let new_text = new_text.into();
2502 if !new_text.is_empty() || !range.is_empty() {
2503 if let Some((prev_range, prev_text)) = edits.last_mut()
2504 && prev_range.end >= range.start
2505 {
2506 prev_range.end = cmp::max(prev_range.end, range.end);
2507 *prev_text = format!("{prev_text}{new_text}").into();
2508 } else {
2509 edits.push((range, new_text));
2510 }
2511 }
2512 }
2513 if edits.is_empty() {
2514 return None;
2515 }
2516
2517 self.start_transaction();
2518 self.pending_autoindent.take();
2519 let autoindent_request = autoindent_mode
2520 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2521
2522 let edit_operation = self.text.edit(edits.iter().cloned());
2523 let edit_id = edit_operation.timestamp();
2524
2525 if let Some((before_edit, mode)) = autoindent_request {
2526 let mut delta = 0isize;
2527 let mut previous_setting = None;
2528 let entries: Vec<_> = edits
2529 .into_iter()
2530 .enumerate()
2531 .zip(&edit_operation.as_edit().unwrap().new_text)
2532 .filter(|((_, (range, _)), _)| {
2533 let language = before_edit.language_at(range.start);
2534 let language_id = language.map(|l| l.id());
2535 if let Some((cached_language_id, auto_indent)) = previous_setting
2536 && cached_language_id == language_id
2537 {
2538 auto_indent
2539 } else {
2540 // The auto-indent setting is not present in editorconfigs, hence
2541 // we can avoid passing the file here.
2542 let auto_indent =
2543 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2544 previous_setting = Some((language_id, auto_indent));
2545 auto_indent
2546 }
2547 })
2548 .map(|((ix, (range, _)), new_text)| {
2549 let new_text_length = new_text.len();
2550 let old_start = range.start.to_point(&before_edit);
2551 let new_start = (delta + range.start as isize) as usize;
2552 let range_len = range.end - range.start;
2553 delta += new_text_length as isize - range_len as isize;
2554
2555 // Decide what range of the insertion to auto-indent, and whether
2556 // the first line of the insertion should be considered a newly-inserted line
2557 // or an edit to an existing line.
2558 let mut range_of_insertion_to_indent = 0..new_text_length;
2559 let mut first_line_is_new = true;
2560
2561 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2562 let old_line_end = before_edit.line_len(old_start.row);
2563
2564 if old_start.column > old_line_start {
2565 first_line_is_new = false;
2566 }
2567
2568 if !new_text.contains('\n')
2569 && (old_start.column + (range_len as u32) < old_line_end
2570 || old_line_end == old_line_start)
2571 {
2572 first_line_is_new = false;
2573 }
2574
2575 // When inserting text starting with a newline, avoid auto-indenting the
2576 // previous line.
2577 if new_text.starts_with('\n') {
2578 range_of_insertion_to_indent.start += 1;
2579 first_line_is_new = true;
2580 }
2581
2582 let mut original_indent_column = None;
2583 if let AutoindentMode::Block {
2584 original_indent_columns,
2585 } = &mode
2586 {
2587 original_indent_column = Some(if new_text.starts_with('\n') {
2588 indent_size_for_text(
2589 new_text[range_of_insertion_to_indent.clone()].chars(),
2590 )
2591 .len
2592 } else {
2593 original_indent_columns
2594 .get(ix)
2595 .copied()
2596 .flatten()
2597 .unwrap_or_else(|| {
2598 indent_size_for_text(
2599 new_text[range_of_insertion_to_indent.clone()].chars(),
2600 )
2601 .len
2602 })
2603 });
2604
2605 // Avoid auto-indenting the line after the edit.
2606 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2607 range_of_insertion_to_indent.end -= 1;
2608 }
2609 }
2610
2611 AutoindentRequestEntry {
2612 first_line_is_new,
2613 original_indent_column,
2614 indent_size: before_edit.language_indent_size_at(range.start, cx),
2615 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2616 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2617 }
2618 })
2619 .collect();
2620
2621 if !entries.is_empty() {
2622 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2623 before_edit,
2624 entries,
2625 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2626 ignore_empty_lines: false,
2627 }));
2628 }
2629 }
2630
2631 self.end_transaction(cx);
2632 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2633 Some(edit_id)
2634 }
2635
2636 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2637 self.was_changed();
2638
2639 if self.edits_since::<usize>(old_version).next().is_none() {
2640 return;
2641 }
2642
2643 self.reparse(cx, true);
2644 cx.emit(BufferEvent::Edited);
2645 if was_dirty != self.is_dirty() {
2646 cx.emit(BufferEvent::DirtyChanged);
2647 }
2648 cx.notify();
2649 }
2650
2651 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2652 where
2653 I: IntoIterator<Item = Range<T>>,
2654 T: ToOffset + Copy,
2655 {
2656 let before_edit = self.snapshot();
2657 let entries = ranges
2658 .into_iter()
2659 .map(|range| AutoindentRequestEntry {
2660 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2661 first_line_is_new: true,
2662 indent_size: before_edit.language_indent_size_at(range.start, cx),
2663 original_indent_column: None,
2664 })
2665 .collect();
2666 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2667 before_edit,
2668 entries,
2669 is_block_mode: false,
2670 ignore_empty_lines: true,
2671 }));
2672 self.request_autoindent(cx);
2673 }
2674
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2677 pub fn insert_empty_line(
2678 &mut self,
2679 position: impl ToPoint,
2680 space_above: bool,
2681 space_below: bool,
2682 cx: &mut Context<Self>,
2683 ) -> Point {
2684 let mut position = position.to_point(self);
2685
2686 self.start_transaction();
2687
2688 self.edit(
2689 [(position..position, "\n")],
2690 Some(AutoindentMode::EachLine),
2691 cx,
2692 );
2693
2694 if position.column > 0 {
2695 position += Point::new(1, 0);
2696 }
2697
2698 if !self.is_line_blank(position.row) {
2699 self.edit(
2700 [(position..position, "\n")],
2701 Some(AutoindentMode::EachLine),
2702 cx,
2703 );
2704 }
2705
2706 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2707 self.edit(
2708 [(position..position, "\n")],
2709 Some(AutoindentMode::EachLine),
2710 cx,
2711 );
2712 position.row += 1;
2713 }
2714
2715 if space_below
2716 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2717 {
2718 self.edit(
2719 [(position..position, "\n")],
2720 Some(AutoindentMode::EachLine),
2721 cx,
2722 );
2723 }
2724
2725 self.end_transaction(cx);
2726
2727 position
2728 }
2729
2730 /// Applies the given remote operations to the buffer.
2731 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2732 self.pending_autoindent.take();
2733 let was_dirty = self.is_dirty();
2734 let old_version = self.version.clone();
2735 let mut deferred_ops = Vec::new();
2736 let buffer_ops = ops
2737 .into_iter()
2738 .filter_map(|op| match op {
2739 Operation::Buffer(op) => Some(op),
2740 _ => {
2741 if self.can_apply_op(&op) {
2742 self.apply_op(op, cx);
2743 } else {
2744 deferred_ops.push(op);
2745 }
2746 None
2747 }
2748 })
2749 .collect::<Vec<_>>();
2750 for operation in buffer_ops.iter() {
2751 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2752 }
2753 self.text.apply_ops(buffer_ops);
2754 self.deferred_ops.insert(deferred_ops);
2755 self.flush_deferred_ops(cx);
2756 self.did_edit(&old_version, was_dirty, cx);
2757 // Notify independently of whether the buffer was edited as the operations could include a
2758 // selection update.
2759 cx.notify();
2760 }
2761
2762 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2763 let mut deferred_ops = Vec::new();
2764 for op in self.deferred_ops.drain().iter().cloned() {
2765 if self.can_apply_op(&op) {
2766 self.apply_op(op, cx);
2767 } else {
2768 deferred_ops.push(op);
2769 }
2770 }
2771 self.deferred_ops.insert(deferred_ops);
2772 }
2773
2774 pub fn has_deferred_ops(&self) -> bool {
2775 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2776 }
2777
2778 fn can_apply_op(&self, operation: &Operation) -> bool {
2779 match operation {
2780 Operation::Buffer(_) => {
2781 unreachable!("buffer operations should never be applied at this layer")
2782 }
2783 Operation::UpdateDiagnostics {
2784 diagnostics: diagnostic_set,
2785 ..
2786 } => diagnostic_set.iter().all(|diagnostic| {
2787 self.text.can_resolve(&diagnostic.range.start)
2788 && self.text.can_resolve(&diagnostic.range.end)
2789 }),
2790 Operation::UpdateSelections { selections, .. } => selections
2791 .iter()
2792 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2793 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2794 }
2795 }
2796
2797 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2798 match operation {
2799 Operation::Buffer(_) => {
2800 unreachable!("buffer operations should never be applied at this layer")
2801 }
2802 Operation::UpdateDiagnostics {
2803 server_id,
2804 diagnostics: diagnostic_set,
2805 lamport_timestamp,
2806 } => {
2807 let snapshot = self.snapshot();
2808 self.apply_diagnostic_update(
2809 server_id,
2810 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2811 lamport_timestamp,
2812 cx,
2813 );
2814 }
2815 Operation::UpdateSelections {
2816 selections,
2817 lamport_timestamp,
2818 line_mode,
2819 cursor_shape,
2820 } => {
2821 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2822 && set.lamport_timestamp > lamport_timestamp
2823 {
2824 return;
2825 }
2826
2827 self.remote_selections.insert(
2828 lamport_timestamp.replica_id,
2829 SelectionSet {
2830 selections,
2831 lamport_timestamp,
2832 line_mode,
2833 cursor_shape,
2834 },
2835 );
2836 self.text.lamport_clock.observe(lamport_timestamp);
2837 self.non_text_state_update_count += 1;
2838 }
2839 Operation::UpdateCompletionTriggers {
2840 triggers,
2841 lamport_timestamp,
2842 server_id,
2843 } => {
2844 if triggers.is_empty() {
2845 self.completion_triggers_per_language_server
2846 .remove(&server_id);
2847 self.completion_triggers = self
2848 .completion_triggers_per_language_server
2849 .values()
2850 .flat_map(|triggers| triggers.iter().cloned())
2851 .collect();
2852 } else {
2853 self.completion_triggers_per_language_server
2854 .insert(server_id, triggers.iter().cloned().collect());
2855 self.completion_triggers.extend(triggers);
2856 }
2857 self.text.lamport_clock.observe(lamport_timestamp);
2858 }
2859 Operation::UpdateLineEnding {
2860 line_ending,
2861 lamport_timestamp,
2862 } => {
2863 self.text.set_line_ending(line_ending);
2864 self.text.lamport_clock.observe(lamport_timestamp);
2865 }
2866 }
2867 }
2868
2869 fn apply_diagnostic_update(
2870 &mut self,
2871 server_id: LanguageServerId,
2872 diagnostics: DiagnosticSet,
2873 lamport_timestamp: clock::Lamport,
2874 cx: &mut Context<Self>,
2875 ) {
2876 if lamport_timestamp > self.diagnostics_timestamp {
2877 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2878 if diagnostics.is_empty() {
2879 if let Ok(ix) = ix {
2880 self.diagnostics.remove(ix);
2881 }
2882 } else {
2883 match ix {
2884 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2885 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2886 };
2887 }
2888 self.diagnostics_timestamp = lamport_timestamp;
2889 self.non_text_state_update_count += 1;
2890 self.text.lamport_clock.observe(lamport_timestamp);
2891 cx.notify();
2892 cx.emit(BufferEvent::DiagnosticsUpdated);
2893 }
2894 }
2895
2896 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2897 self.was_changed();
2898 cx.emit(BufferEvent::Operation {
2899 operation,
2900 is_local,
2901 });
2902 }
2903
2904 /// Removes the selections for a given peer.
2905 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2906 self.remote_selections.remove(&replica_id);
2907 cx.notify();
2908 }
2909
2910 /// Undoes the most recent transaction.
2911 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2912 let was_dirty = self.is_dirty();
2913 let old_version = self.version.clone();
2914
2915 if let Some((transaction_id, operation)) = self.text.undo() {
2916 self.send_operation(Operation::Buffer(operation), true, cx);
2917 self.did_edit(&old_version, was_dirty, cx);
2918 Some(transaction_id)
2919 } else {
2920 None
2921 }
2922 }
2923
2924 /// Manually undoes a specific transaction in the buffer's undo history.
2925 pub fn undo_transaction(
2926 &mut self,
2927 transaction_id: TransactionId,
2928 cx: &mut Context<Self>,
2929 ) -> bool {
2930 let was_dirty = self.is_dirty();
2931 let old_version = self.version.clone();
2932 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2933 self.send_operation(Operation::Buffer(operation), true, cx);
2934 self.did_edit(&old_version, was_dirty, cx);
2935 true
2936 } else {
2937 false
2938 }
2939 }
2940
2941 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2942 pub fn undo_to_transaction(
2943 &mut self,
2944 transaction_id: TransactionId,
2945 cx: &mut Context<Self>,
2946 ) -> bool {
2947 let was_dirty = self.is_dirty();
2948 let old_version = self.version.clone();
2949
2950 let operations = self.text.undo_to_transaction(transaction_id);
2951 let undone = !operations.is_empty();
2952 for operation in operations {
2953 self.send_operation(Operation::Buffer(operation), true, cx);
2954 }
2955 if undone {
2956 self.did_edit(&old_version, was_dirty, cx)
2957 }
2958 undone
2959 }
2960
2961 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2962 let was_dirty = self.is_dirty();
2963 let operation = self.text.undo_operations(counts);
2964 let old_version = self.version.clone();
2965 self.send_operation(Operation::Buffer(operation), true, cx);
2966 self.did_edit(&old_version, was_dirty, cx);
2967 }
2968
    /// Redoes the most recently undone transaction.
2970 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2971 let was_dirty = self.is_dirty();
2972 let old_version = self.version.clone();
2973
2974 if let Some((transaction_id, operation)) = self.text.redo() {
2975 self.send_operation(Operation::Buffer(operation), true, cx);
2976 self.did_edit(&old_version, was_dirty, cx);
2977 Some(transaction_id)
2978 } else {
2979 None
2980 }
2981 }
2982
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2984 pub fn redo_to_transaction(
2985 &mut self,
2986 transaction_id: TransactionId,
2987 cx: &mut Context<Self>,
2988 ) -> bool {
2989 let was_dirty = self.is_dirty();
2990 let old_version = self.version.clone();
2991
2992 let operations = self.text.redo_to_transaction(transaction_id);
2993 let redone = !operations.is_empty();
2994 for operation in operations {
2995 self.send_operation(Operation::Buffer(operation), true, cx);
2996 }
2997 if redone {
2998 self.did_edit(&old_version, was_dirty, cx)
2999 }
3000 redone
3001 }
3002
3003 /// Override current completion triggers with the user-provided completion triggers.
3004 pub fn set_completion_triggers(
3005 &mut self,
3006 server_id: LanguageServerId,
3007 triggers: BTreeSet<String>,
3008 cx: &mut Context<Self>,
3009 ) {
3010 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3011 if triggers.is_empty() {
3012 self.completion_triggers_per_language_server
3013 .remove(&server_id);
3014 self.completion_triggers = self
3015 .completion_triggers_per_language_server
3016 .values()
3017 .flat_map(|triggers| triggers.iter().cloned())
3018 .collect();
3019 } else {
3020 self.completion_triggers_per_language_server
3021 .insert(server_id, triggers.clone());
3022 self.completion_triggers.extend(triggers.iter().cloned());
3023 }
3024 self.send_operation(
3025 Operation::UpdateCompletionTriggers {
3026 triggers: triggers.into_iter().collect(),
3027 lamport_timestamp: self.completion_triggers_timestamp,
3028 server_id,
3029 },
3030 true,
3031 cx,
3032 );
3033 cx.notify();
3034 }
3035
3036 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3038 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3039 &self.completion_triggers
3040 }
3041
3042 /// Call this directly after performing edits to prevent the preview tab
3043 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3044 /// to return false until there are additional edits.
3045 pub fn refresh_preview(&mut self) {
3046 self.preview_version = self.version.clone();
3047 }
3048
3049 /// Whether we should preserve the preview status of a tab containing this buffer.
3050 pub fn preserve_preview(&self) -> bool {
3051 !self.has_edits_since(&self.preview_version)
3052 }
3053}
3054
3055#[doc(hidden)]
3056#[cfg(any(test, feature = "test-support"))]
3057impl Buffer {
3058 pub fn edit_via_marked_text(
3059 &mut self,
3060 marked_string: &str,
3061 autoindent_mode: Option<AutoindentMode>,
3062 cx: &mut Context<Self>,
3063 ) {
3064 let edits = self.edits_for_marked_text(marked_string);
3065 self.edit(edits, autoindent_mode, cx);
3066 }
3067
3068 pub fn set_group_interval(&mut self, group_interval: Duration) {
3069 self.text.set_group_interval(group_interval);
3070 }
3071
3072 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3073 where
3074 T: rand::Rng,
3075 {
3076 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3077 let mut last_end = None;
3078 for _ in 0..old_range_count {
3079 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3080 break;
3081 }
3082
3083 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3084 let mut range = self.random_byte_range(new_start, rng);
3085 if rng.random_bool(0.2) {
3086 mem::swap(&mut range.start, &mut range.end);
3087 }
3088 last_end = Some(range.end);
3089
3090 let new_text_len = rng.random_range(0..10);
3091 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3092 new_text = new_text.to_uppercase();
3093
3094 edits.push((range, new_text));
3095 }
3096 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3097 self.edit(edits, None, cx);
3098 }
3099
3100 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3101 let was_dirty = self.is_dirty();
3102 let old_version = self.version.clone();
3103
3104 let ops = self.text.randomly_undo_redo(rng);
3105 if !ops.is_empty() {
3106 for op in ops {
3107 self.send_operation(Operation::Buffer(op), true, cx);
3108 self.did_edit(&old_version, was_dirty, cx);
3109 }
3110 }
3111 }
3112}
3113
3114impl EventEmitter<BufferEvent> for Buffer {}
3115
3116impl Deref for Buffer {
3117 type Target = TextBuffer;
3118
3119 fn deref(&self) -> &Self::Target {
3120 &self.text
3121 }
3122}
3123
3124impl BufferSnapshot {
3125 /// Returns [`IndentSize`] for a given line that respects user settings and
3126 /// language preferences.
3127 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3128 indent_size_for_line(self, row)
3129 }
3130
3131 /// Returns [`IndentSize`] for a given position that respects user settings
3132 /// and language preferences.
3133 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3134 let settings = language_settings(
3135 self.language_at(position).map(|l| l.name()),
3136 self.file(),
3137 cx,
3138 );
3139 if settings.hard_tabs {
3140 IndentSize::tab()
3141 } else {
3142 IndentSize::spaces(settings.tab_size.get())
3143 }
3144 }
3145
3146 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3147 /// is passed in as `single_indent_size`.
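    ///
    /// A minimal sketch, assuming four-space indentation and an existing
    /// `BufferSnapshot` named `snapshot`:
    ///
    /// ```ignore
    /// // Suggest indent sizes for the first ten rows.
    /// let indents = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row} should be indented by {} columns", indent.len);
    /// }
    /// ```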
3148 pub fn suggested_indents(
3149 &self,
3150 rows: impl Iterator<Item = u32>,
3151 single_indent_size: IndentSize,
3152 ) -> BTreeMap<u32, IndentSize> {
3153 let mut result = BTreeMap::new();
3154
3155 for row_range in contiguous_ranges(rows, 10) {
3156 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3157 Some(suggestions) => suggestions,
3158 _ => break,
3159 };
3160
3161 for (row, suggestion) in row_range.zip(suggestions) {
3162 let indent_size = if let Some(suggestion) = suggestion {
3163 result
3164 .get(&suggestion.basis_row)
3165 .copied()
3166 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3167 .with_delta(suggestion.delta, single_indent_size)
3168 } else {
3169 self.indent_size_for_line(row)
3170 };
3171
3172 result.insert(row, indent_size);
3173 }
3174 }
3175
3176 result
3177 }
3178
3179 fn suggest_autoindents(
3180 &self,
3181 row_range: Range<u32>,
3182 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3183 let config = &self.language.as_ref()?.config;
3184 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3185
3186 #[derive(Debug, Clone)]
3187 struct StartPosition {
3188 start: Point,
3189 suffix: SharedString,
3190 }
3191
3192 // Find the suggested indentation ranges based on the syntax tree.
3193 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3194 let end = Point::new(row_range.end, 0);
3195 let range = (start..end).to_offset(&self.text);
3196 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3197 Some(&grammar.indents_config.as_ref()?.query)
3198 });
3199 let indent_configs = matches
3200 .grammars()
3201 .iter()
3202 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3203 .collect::<Vec<_>>();
3204
3205 let mut indent_ranges = Vec::<Range<Point>>::new();
3206 let mut start_positions = Vec::<StartPosition>::new();
3207 let mut outdent_positions = Vec::<Point>::new();
3208 while let Some(mat) = matches.peek() {
3209 let mut start: Option<Point> = None;
3210 let mut end: Option<Point> = None;
3211
3212 let config = indent_configs[mat.grammar_index];
3213 for capture in mat.captures {
3214 if capture.index == config.indent_capture_ix {
3215 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3216 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3217 } else if Some(capture.index) == config.start_capture_ix {
3218 start = Some(Point::from_ts_point(capture.node.end_position()));
3219 } else if Some(capture.index) == config.end_capture_ix {
3220 end = Some(Point::from_ts_point(capture.node.start_position()));
3221 } else if Some(capture.index) == config.outdent_capture_ix {
3222 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3223 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3224 start_positions.push(StartPosition {
3225 start: Point::from_ts_point(capture.node.start_position()),
3226 suffix: suffix.clone(),
3227 });
3228 }
3229 }
3230
3231 matches.advance();
3232 if let Some((start, end)) = start.zip(end) {
3233 if start.row == end.row {
3234 continue;
3235 }
3236 let range = start..end;
3237 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3238 Err(ix) => indent_ranges.insert(ix, range),
3239 Ok(ix) => {
3240 let prev_range = &mut indent_ranges[ix];
3241 prev_range.end = prev_range.end.max(range.end);
3242 }
3243 }
3244 }
3245 }
3246
3247 let mut error_ranges = Vec::<Range<Point>>::new();
3248 let mut matches = self
3249 .syntax
3250 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3251 while let Some(mat) = matches.peek() {
3252 let node = mat.captures[0].node;
3253 let start = Point::from_ts_point(node.start_position());
3254 let end = Point::from_ts_point(node.end_position());
3255 let range = start..end;
3256 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3257 Ok(ix) | Err(ix) => ix,
3258 };
3259 let mut end_ix = ix;
3260 while let Some(existing_range) = error_ranges.get(end_ix) {
3261 if existing_range.end < end {
3262 end_ix += 1;
3263 } else {
3264 break;
3265 }
3266 }
3267 error_ranges.splice(ix..end_ix, [range]);
3268 matches.advance();
3269 }
3270
3271 outdent_positions.sort();
3272 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
3275 if let Some(range_to_truncate) = indent_ranges
3276 .iter_mut()
3277 .filter(|indent_range| indent_range.contains(&outdent_position))
3278 .next_back()
3279 {
3280 range_to_truncate.end = outdent_position;
3281 }
3282 }
3283
3284 start_positions.sort_by_key(|b| b.start);
3285
        // Find the suggested indentation increases and decreases based on regexes.
3287 let mut regex_outdent_map = HashMap::default();
3288 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3289 let mut start_positions_iter = start_positions.iter().peekable();
3290
3291 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3292 self.for_each_line(
3293 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3294 ..Point::new(row_range.end, 0),
3295 |row, line| {
3296 if config
3297 .decrease_indent_pattern
3298 .as_ref()
3299 .is_some_and(|regex| regex.is_match(line))
3300 {
3301 indent_change_rows.push((row, Ordering::Less));
3302 }
3303 if config
3304 .increase_indent_pattern
3305 .as_ref()
3306 .is_some_and(|regex| regex.is_match(line))
3307 {
3308 indent_change_rows.push((row + 1, Ordering::Greater));
3309 }
3310 while let Some(pos) = start_positions_iter.peek() {
3311 if pos.start.row < row {
3312 let pos = start_positions_iter.next().unwrap();
3313 last_seen_suffix
3314 .entry(pos.suffix.to_string())
3315 .or_default()
3316 .push(pos.start);
3317 } else {
3318 break;
3319 }
3320 }
3321 for rule in &config.decrease_indent_patterns {
3322 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3323 let row_start_column = self.indent_size_for_line(row).len;
3324 let basis_row = rule
3325 .valid_after
3326 .iter()
3327 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3328 .flatten()
3329 .filter(|start_point| start_point.column <= row_start_column)
3330 .max_by_key(|start_point| start_point.row);
3331 if let Some(outdent_to_row) = basis_row {
3332 regex_outdent_map.insert(row, outdent_to_row.row);
3333 }
3334 break;
3335 }
3336 }
3337 },
3338 );
3339
3340 let mut indent_changes = indent_change_rows.into_iter().peekable();
3341 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3342 prev_non_blank_row.unwrap_or(0)
3343 } else {
3344 row_range.start.saturating_sub(1)
3345 };
3346
3347 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3348 Some(row_range.map(move |row| {
3349 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3350
3351 let mut indent_from_prev_row = false;
3352 let mut outdent_from_prev_row = false;
3353 let mut outdent_to_row = u32::MAX;
3354 let mut from_regex = false;
3355
3356 while let Some((indent_row, delta)) = indent_changes.peek() {
3357 match indent_row.cmp(&row) {
3358 Ordering::Equal => match delta {
3359 Ordering::Less => {
3360 from_regex = true;
3361 outdent_from_prev_row = true
3362 }
3363 Ordering::Greater => {
3364 indent_from_prev_row = true;
3365 from_regex = true
3366 }
3367 _ => {}
3368 },
3369
3370 Ordering::Greater => break,
3371 Ordering::Less => {}
3372 }
3373
3374 indent_changes.next();
3375 }
3376
3377 for range in &indent_ranges {
3378 if range.start.row >= row {
3379 break;
3380 }
3381 if range.start.row == prev_row && range.end > row_start {
3382 indent_from_prev_row = true;
3383 }
3384 if range.end > prev_row_start && range.end <= row_start {
3385 outdent_to_row = outdent_to_row.min(range.start.row);
3386 }
3387 }
3388
3389 if let Some(basis_row) = regex_outdent_map.get(&row) {
3390 indent_from_prev_row = false;
3391 outdent_to_row = *basis_row;
3392 from_regex = true;
3393 }
3394
3395 let within_error = error_ranges
3396 .iter()
3397 .any(|e| e.start.row < row && e.end > row_start);
3398
3399 let suggestion = if outdent_to_row == prev_row
3400 || (outdent_from_prev_row && indent_from_prev_row)
3401 {
3402 Some(IndentSuggestion {
3403 basis_row: prev_row,
3404 delta: Ordering::Equal,
3405 within_error: within_error && !from_regex,
3406 })
3407 } else if indent_from_prev_row {
3408 Some(IndentSuggestion {
3409 basis_row: prev_row,
3410 delta: Ordering::Greater,
3411 within_error: within_error && !from_regex,
3412 })
3413 } else if outdent_to_row < prev_row {
3414 Some(IndentSuggestion {
3415 basis_row: outdent_to_row,
3416 delta: Ordering::Equal,
3417 within_error: within_error && !from_regex,
3418 })
3419 } else if outdent_from_prev_row {
3420 Some(IndentSuggestion {
3421 basis_row: prev_row,
3422 delta: Ordering::Less,
3423 within_error: within_error && !from_regex,
3424 })
3425 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3426 {
3427 Some(IndentSuggestion {
3428 basis_row: prev_row,
3429 delta: Ordering::Equal,
3430 within_error: within_error && !from_regex,
3431 })
3432 } else {
3433 None
3434 };
3435
3436 prev_row = row;
3437 prev_row_start = row_start;
3438 suggestion
3439 }))
3440 }
3441
3442 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3443 while row > 0 {
3444 row -= 1;
3445 if !self.is_line_blank(row) {
3446 return Some(row);
3447 }
3448 }
3449 None
3450 }
3451
3452 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3453 let captures = self.syntax.captures(range, &self.text, |grammar| {
3454 grammar
3455 .highlights_config
3456 .as_ref()
3457 .map(|config| &config.query)
3458 });
3459 let highlight_maps = captures
3460 .grammars()
3461 .iter()
3462 .map(|grammar| grammar.highlight_map())
3463 .collect();
3464 (captures, highlight_maps)
3465 }
3466
3467 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3468 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3469 /// returned in chunks where each chunk has a single syntax highlighting style and
3470 /// diagnostic status.
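///
/// For example (an illustrative sketch, not compiled as a doc-test; `snapshot` is assumed
/// to be a `BufferSnapshot`):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(highlight_id) = chunk.syntax_highlight_id {
///         // This whole chunk is covered by a single syntax highlight.
///         let _ = highlight_id;
///     }
///     print!("{}", chunk.text);
/// }
/// ```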
3471 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3472 let range = range.start.to_offset(self)..range.end.to_offset(self);
3473
3474 let mut syntax = None;
3475 if language_aware {
3476 syntax = Some(self.get_highlights(range.clone()));
3477 }
3478 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3479 let diagnostics = language_aware;
3480 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3481 }
3482
3483 pub fn highlighted_text_for_range<T: ToOffset>(
3484 &self,
3485 range: Range<T>,
3486 override_style: Option<HighlightStyle>,
3487 syntax_theme: &SyntaxTheme,
3488 ) -> HighlightedText {
3489 HighlightedText::from_buffer_range(
3490 range,
3491 &self.text,
3492 &self.syntax,
3493 override_style,
3494 syntax_theme,
3495 )
3496 }
3497
3498 /// Invokes the given callback for each line of text in the given range of the buffer.
3499 /// Uses a callback and a reused line buffer to avoid allocating a new string for each line.
3500 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3501 let mut line = String::new();
3502 let mut row = range.start.row;
3503 for chunk in self
3504 .as_rope()
3505 .chunks_in_range(range.to_offset(self))
3506 .chain(["\n"])
3507 {
3508 for (newline_ix, text) in chunk.split('\n').enumerate() {
3509 if newline_ix > 0 {
3510 callback(row, &line);
3511 row += 1;
3512 line.clear();
3513 }
3514 line.push_str(text);
3515 }
3516 }
3517 }
3518
3519 /// Iterates over every [`SyntaxLayer`] in the buffer.
3520 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3521 self.syntax_layers_for_range(0..self.len(), true)
3522 }
3523
3524 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3525 let offset = position.to_offset(self);
3526 self.syntax_layers_for_range(offset..offset, false)
3527 .filter(|l| {
3528 if let Some(ranges) = l.included_sub_ranges {
3529 ranges.iter().any(|range| {
3530 let start = range.start.to_offset(self);
3531 start <= offset && {
3532 let end = range.end.to_offset(self);
3533 offset < end
3534 }
3535 })
3536 } else {
3537 l.node().start_byte() <= offset && l.node().end_byte() > offset
3538 }
3539 })
3540 .last()
3541 }
3542
3543 pub fn syntax_layers_for_range<D: ToOffset>(
3544 &self,
3545 range: Range<D>,
3546 include_hidden: bool,
3547 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3548 self.syntax
3549 .layers_for_range(range, &self.text, include_hidden)
3550 }
3551
3552 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3553 &self,
3554 range: Range<D>,
3555 ) -> Option<SyntaxLayer<'_>> {
3556 let range = range.to_offset(self);
3557 self.syntax
3558 .layers_for_range(range, &self.text, false)
3559 .max_by(|a, b| {
3560 if a.depth != b.depth {
3561 a.depth.cmp(&b.depth)
3562 } else if a.offset.0 != b.offset.0 {
3563 a.offset.0.cmp(&b.offset.0)
3564 } else {
3565 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3566 }
3567 })
3568 }
3569
3570 /// Returns the main [`Language`].
3571 pub fn language(&self) -> Option<&Arc<Language>> {
3572 self.language.as_ref()
3573 }
3574
3575 /// Returns the [`Language`] at the given location.
3576 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3577 self.syntax_layer_at(position)
3578 .map(|info| info.language)
3579 .or(self.language.as_ref())
3580 }
3581
3582 /// Returns the settings for the language at the given location.
3583 pub fn settings_at<'a, D: ToOffset>(
3584 &'a self,
3585 position: D,
3586 cx: &'a App,
3587 ) -> Cow<'a, LanguageSettings> {
3588 language_settings(
3589 self.language_at(position).map(|l| l.name()),
3590 self.file.as_ref(),
3591 cx,
3592 )
3593 }
3594
3595 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3596 CharClassifier::new(self.language_scope_at(point))
3597 }
3598
3599 /// Returns the [`LanguageScope`] at the given location.
3600 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3601 let offset = position.to_offset(self);
3602 let mut scope = None;
3603 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3604
3605 // Use the layer that has the smallest node intersecting the given point.
3606 for layer in self
3607 .syntax
3608 .layers_for_range(offset..offset, &self.text, false)
3609 {
3610 let mut cursor = layer.node().walk();
3611
3612 let mut range = None;
3613 loop {
3614 let child_range = cursor.node().byte_range();
3615 if !child_range.contains(&offset) {
3616 break;
3617 }
3618
3619 range = Some(child_range);
3620 if cursor.goto_first_child_for_byte(offset).is_none() {
3621 break;
3622 }
3623 }
3624
3625 if let Some(range) = range
3626 && smallest_range_and_depth.as_ref().is_none_or(
3627 |(smallest_range, smallest_range_depth)| {
3628 if layer.depth > *smallest_range_depth {
3629 true
3630 } else if layer.depth == *smallest_range_depth {
3631 range.len() < smallest_range.len()
3632 } else {
3633 false
3634 }
3635 },
3636 )
3637 {
3638 smallest_range_and_depth = Some((range, layer.depth));
3639 scope = Some(LanguageScope {
3640 language: layer.language.clone(),
3641 override_id: layer.override_id(offset, &self.text),
3642 });
3643 }
3644 }
3645
3646 scope.or_else(|| {
3647 self.language.clone().map(|language| LanguageScope {
3648 language,
3649 override_id: None,
3650 })
3651 })
3652 }
3653
3654 /// Returns a tuple of the range and character kind of the word
3655 /// surrounding the given position.
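///
/// For example (an illustrative sketch, not compiled as a doc-test; `snapshot` and
/// `offset` are assumed to exist):
///
/// ```ignore
/// let (word_range, kind) = snapshot.surrounding_word(offset, None);
/// let word: String = snapshot.text_for_range(word_range).collect();
/// ```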
3656 pub fn surrounding_word<T: ToOffset>(
3657 &self,
3658 start: T,
3659 scope_context: Option<CharScopeContext>,
3660 ) -> (Range<usize>, Option<CharKind>) {
3661 let mut start = start.to_offset(self);
3662 let mut end = start;
3663 let mut next_chars = self.chars_at(start).take(128).peekable();
3664 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3665
3666 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3667 let word_kind = cmp::max(
3668 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3669 next_chars.peek().copied().map(|c| classifier.kind(c)),
3670 );
3671
3672 for ch in prev_chars {
3673 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3674 start -= ch.len_utf8();
3675 } else {
3676 break;
3677 }
3678 }
3679
3680 for ch in next_chars {
3681 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3682 end += ch.len_utf8();
3683 } else {
3684 break;
3685 }
3686 }
3687
3688 (start..end, word_kind)
3689 }
3690
3691 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3692 /// range. When `require_larger` is true, the node found must be larger than the query range.
3693 ///
3694 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3695 /// be moved to the root of the tree.
3696 fn goto_node_enclosing_range(
3697 cursor: &mut tree_sitter::TreeCursor,
3698 query_range: &Range<usize>,
3699 require_larger: bool,
3700 ) -> bool {
3701 let mut ascending = false;
3702 loop {
3703 let mut range = cursor.node().byte_range();
3704 if query_range.is_empty() {
3705 // When the query range is empty and the current node starts after it, move to the
3706 // previous sibling to find the containing node.
3707 if range.start > query_range.start {
3708 cursor.goto_previous_sibling();
3709 range = cursor.node().byte_range();
3710 }
3711 } else {
3712 // When the query range is non-empty and the current node ends exactly at the start,
3713 // move to the next sibling to find a node that extends beyond the start.
3714 if range.end == query_range.start {
3715 cursor.goto_next_sibling();
3716 range = cursor.node().byte_range();
3717 }
3718 }
3719
3720 let encloses = range.contains_inclusive(query_range)
3721 && (!require_larger || range.len() > query_range.len());
3722 if !encloses {
3723 ascending = true;
3724 if !cursor.goto_parent() {
3725 return false;
3726 }
3727 continue;
3728 } else if ascending {
3729 return true;
3730 }
3731
3732 // Descend into the current node.
3733 if cursor
3734 .goto_first_child_for_byte(query_range.start)
3735 .is_none()
3736 {
3737 return true;
3738 }
3739 }
3740 }
3741
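/// Returns the smallest syntax node, across all syntax layers, that encloses the given
/// range and is strictly larger than it. For an empty range that falls on a node
/// boundary, a node immediately to the right of the range may be preferred.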
3742 pub fn syntax_ancestor<'a, T: ToOffset>(
3743 &'a self,
3744 range: Range<T>,
3745 ) -> Option<tree_sitter::Node<'a>> {
3746 let range = range.start.to_offset(self)..range.end.to_offset(self);
3747 let mut result: Option<tree_sitter::Node<'a>> = None;
3748 for layer in self
3749 .syntax
3750 .layers_for_range(range.clone(), &self.text, true)
3751 {
3752 let mut cursor = layer.node().walk();
3753
3754 // Find the node that both contains the range and is larger than it.
3755 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3756 continue;
3757 }
3758
3759 let left_node = cursor.node();
3760 let mut layer_result = left_node;
3761
3762 // For an empty range, try to find another node immediately to the right of the range.
3763 if left_node.end_byte() == range.start {
3764 let mut right_node = None;
3765 while !cursor.goto_next_sibling() {
3766 if !cursor.goto_parent() {
3767 break;
3768 }
3769 }
3770
3771 while cursor.node().start_byte() == range.start {
3772 right_node = Some(cursor.node());
3773 if !cursor.goto_first_child() {
3774 break;
3775 }
3776 }
3777
3778 // If there is a candidate node on both sides of the (empty) range, then
3779 // decide between the two by favoring a named node over an anonymous token.
3780 // If both nodes are the same in that regard, favor the right one.
3781 if let Some(right_node) = right_node
3782 && (right_node.is_named() || !left_node.is_named())
3783 {
3784 layer_result = right_node;
3785 }
3786 }
3787
3788 if let Some(previous_result) = &result
3789 && previous_result.byte_range().len() < layer_result.byte_range().len()
3790 {
3791 continue;
3792 }
3793 result = Some(layer_result);
3794 }
3795
3796 result
3797 }
3798
3799 /// Find the previous sibling syntax node at the given range.
3800 ///
3801 /// This function locates the syntax node that precedes the node containing
3802 /// the given range. It searches hierarchically by:
3803 /// 1. Finding the node that contains the given range
3804 /// 2. Looking for the previous sibling at the same tree level
3805 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3806 ///
3807 /// Returns `None` if there is no previous sibling at any ancestor level.
3808 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3809 &'a self,
3810 range: Range<T>,
3811 ) -> Option<tree_sitter::Node<'a>> {
3812 let range = range.start.to_offset(self)..range.end.to_offset(self);
3813 let mut result: Option<tree_sitter::Node<'a>> = None;
3814
3815 for layer in self
3816 .syntax
3817 .layers_for_range(range.clone(), &self.text, true)
3818 {
3819 let mut cursor = layer.node().walk();
3820
3821 // Find the node that contains the range
3822 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3823 continue;
3824 }
3825
3826 // Look for the previous sibling, moving up ancestor levels if needed
3827 loop {
3828 if cursor.goto_previous_sibling() {
3829 let layer_result = cursor.node();
3830
3831 if let Some(previous_result) = &result {
3832 if previous_result.byte_range().end < layer_result.byte_range().end {
3833 continue;
3834 }
3835 }
3836 result = Some(layer_result);
3837 break;
3838 }
3839
3840 // No sibling found at this level, try moving up to parent
3841 if !cursor.goto_parent() {
3842 break;
3843 }
3844 }
3845 }
3846
3847 result
3848 }
3849
3850 /// Find the next sibling syntax node at the given range.
3851 ///
3852 /// This function locates the syntax node that follows the node containing
3853 /// the given range. It searches hierarchically by:
3854 /// 1. Finding the node that contains the given range
3855 /// 2. Looking for the next sibling at the same tree level
3856 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3857 ///
3858 /// Returns `None` if there is no next sibling at any ancestor level.
3859 pub fn syntax_next_sibling<'a, T: ToOffset>(
3860 &'a self,
3861 range: Range<T>,
3862 ) -> Option<tree_sitter::Node<'a>> {
3863 let range = range.start.to_offset(self)..range.end.to_offset(self);
3864 let mut result: Option<tree_sitter::Node<'a>> = None;
3865
3866 for layer in self
3867 .syntax
3868 .layers_for_range(range.clone(), &self.text, true)
3869 {
3870 let mut cursor = layer.node().walk();
3871
3872 // Find the node that contains the range
3873 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3874 continue;
3875 }
3876
3877 // Look for the next sibling, moving up ancestor levels if needed
3878 loop {
3879 if cursor.goto_next_sibling() {
3880 let layer_result = cursor.node();
3881
3882 if let Some(previous_result) = &result {
3883 if previous_result.byte_range().start > layer_result.byte_range().start {
3884 continue;
3885 }
3886 }
3887 result = Some(layer_result);
3888 break;
3889 }
3890
3891 // No sibling found at this level, try moving up to parent
3892 if !cursor.goto_parent() {
3893 break;
3894 }
3895 }
3896 }
3897
3898 result
3899 }
3900
3901 /// Returns the root syntax node within the given row.
3902 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3903 let start_offset = position.to_offset(self);
3904
3905 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3906
3907 let layer = self
3908 .syntax
3909 .layers_for_range(start_offset..start_offset, &self.text, true)
3910 .next()?;
3911
3912 let mut cursor = layer.node().walk();
3913
3914 // Descend to the first leaf that touches the given position.
3915 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3916 if cursor.node().end_byte() == start_offset {
3917 cursor.goto_next_sibling();
3918 }
3919 }
3920
3921 // Ascend to the root node within the same row.
3922 while cursor.goto_parent() {
3923 if cursor.node().start_position().row != row {
3924 break;
3925 }
3926 }
3927
3928 Some(cursor.node())
3929 }
3930
3931 /// Returns the outline for the buffer.
3932 ///
3933 /// This method allows passing an optional [`SyntaxTheme`] to
3934 /// syntax-highlight the returned symbols.
3935 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3936 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3937 }
3938
3939 /// Returns all the symbols that contain the given position.
3940 ///
3941 /// This method allows passing an optional [`SyntaxTheme`] to
3942 /// syntax-highlight the returned symbols.
3943 pub fn symbols_containing<T: ToOffset>(
3944 &self,
3945 position: T,
3946 theme: Option<&SyntaxTheme>,
3947 ) -> Vec<OutlineItem<Anchor>> {
3948 let position = position.to_offset(self);
3949 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3950 let end = self.clip_offset(position + 1, Bias::Right);
3951 let mut items = self.outline_items_containing(start..end, false, theme);
3952 let mut prev_depth = None;
3953 items.retain(|item| {
3954 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3955 prev_depth = Some(item.depth);
3956 result
3957 });
3958 items
3959 }
3960
3961 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3962 let range = range.to_offset(self);
3963 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3964 grammar.outline_config.as_ref().map(|c| &c.query)
3965 });
3966 let configs = matches
3967 .grammars()
3968 .iter()
3969 .map(|g| g.outline_config.as_ref().unwrap())
3970 .collect::<Vec<_>>();
3971
3972 while let Some(mat) = matches.peek() {
3973 let config = &configs[mat.grammar_index];
3974 let containing_item_node = maybe!({
3975 let item_node = mat.captures.iter().find_map(|cap| {
3976 if cap.index == config.item_capture_ix {
3977 Some(cap.node)
3978 } else {
3979 None
3980 }
3981 })?;
3982
3983 let item_byte_range = item_node.byte_range();
3984 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3985 None
3986 } else {
3987 Some(item_node)
3988 }
3989 });
3990
3991 if let Some(item_node) = containing_item_node {
3992 return Some(
3993 Point::from_ts_point(item_node.start_position())
3994 ..Point::from_ts_point(item_node.end_position()),
3995 );
3996 }
3997
3998 matches.advance();
3999 }
4000 None
4001 }
4002
4003 pub fn outline_items_containing<T: ToOffset>(
4004 &self,
4005 range: Range<T>,
4006 include_extra_context: bool,
4007 theme: Option<&SyntaxTheme>,
4008 ) -> Vec<OutlineItem<Anchor>> {
4009 self.outline_items_containing_internal(
4010 range,
4011 include_extra_context,
4012 theme,
4013 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4014 )
4015 }
4016
4017 pub fn outline_items_as_points_containing<T: ToOffset>(
4018 &self,
4019 range: Range<T>,
4020 include_extra_context: bool,
4021 theme: Option<&SyntaxTheme>,
4022 ) -> Vec<OutlineItem<Point>> {
4023 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4024 range
4025 })
4026 }
4027
4028 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4029 &self,
4030 range: Range<T>,
4031 include_extra_context: bool,
4032 theme: Option<&SyntaxTheme>,
4033 ) -> Vec<OutlineItem<usize>> {
4034 self.outline_items_containing_internal(
4035 range,
4036 include_extra_context,
4037 theme,
4038 |buffer, range| range.to_offset(buffer),
4039 )
4040 }
4041
4042 fn outline_items_containing_internal<T: ToOffset, U>(
4043 &self,
4044 range: Range<T>,
4045 include_extra_context: bool,
4046 theme: Option<&SyntaxTheme>,
4047 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4048 ) -> Vec<OutlineItem<U>> {
4049 let range = range.to_offset(self);
4050 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4051 grammar.outline_config.as_ref().map(|c| &c.query)
4052 });
4053
4054 let mut items = Vec::new();
4055 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4056 while let Some(mat) = matches.peek() {
4057 let config = matches.grammars()[mat.grammar_index]
4058 .outline_config
4059 .as_ref()
4060 .unwrap();
4061 if let Some(item) =
4062 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4063 {
4064 items.push(item);
4065 } else if let Some(capture) = mat
4066 .captures
4067 .iter()
4068 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4069 {
4070 let capture_range = capture.node.start_position()..capture.node.end_position();
4071 let mut capture_row_range =
4072 capture_range.start.row as u32..capture_range.end.row as u32;
4073 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4074 {
4075 capture_row_range.end -= 1;
4076 }
4077 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4078 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4079 last_row_range.end = capture_row_range.end;
4080 } else {
4081 annotation_row_ranges.push(capture_row_range);
4082 }
4083 } else {
4084 annotation_row_ranges.push(capture_row_range);
4085 }
4086 }
4087 matches.advance();
4088 }
4089
4090 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4091
4092 // Assign depths based on containment relationships and map the item ranges via `range_callback`.
4093 let mut item_ends_stack = Vec::<Point>::new();
4094 let mut anchor_items = Vec::new();
4095 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4096 for item in items {
4097 while let Some(last_end) = item_ends_stack.last().copied() {
4098 if last_end < item.range.end {
4099 item_ends_stack.pop();
4100 } else {
4101 break;
4102 }
4103 }
4104
4105 let mut annotation_row_range = None;
4106 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4107 let row_preceding_item = item.range.start.row.saturating_sub(1);
4108 if next_annotation_row_range.end < row_preceding_item {
4109 annotation_row_ranges.next();
4110 } else {
4111 if next_annotation_row_range.end == row_preceding_item {
4112 annotation_row_range = Some(next_annotation_row_range.clone());
4113 annotation_row_ranges.next();
4114 }
4115 break;
4116 }
4117 }
4118
4119 anchor_items.push(OutlineItem {
4120 depth: item_ends_stack.len(),
4121 range: range_callback(self, item.range.clone()),
4122 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4123 text: item.text,
4124 highlight_ranges: item.highlight_ranges,
4125 name_ranges: item.name_ranges,
4126 body_range: item.body_range.map(|r| range_callback(self, r)),
4127 annotation_range: annotation_row_range.map(|annotation_range| {
4128 let point_range = Point::new(annotation_range.start, 0)
4129 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4130 range_callback(self, point_range)
4131 }),
4132 });
4133 item_ends_stack.push(item.range.end);
4134 }
4135
4136 anchor_items
4137 }
4138
4139 fn next_outline_item(
4140 &self,
4141 config: &OutlineConfig,
4142 mat: &SyntaxMapMatch,
4143 range: &Range<usize>,
4144 include_extra_context: bool,
4145 theme: Option<&SyntaxTheme>,
4146 ) -> Option<OutlineItem<Point>> {
4147 let item_node = mat.captures.iter().find_map(|cap| {
4148 if cap.index == config.item_capture_ix {
4149 Some(cap.node)
4150 } else {
4151 None
4152 }
4153 })?;
4154
4155 let item_byte_range = item_node.byte_range();
4156 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4157 return None;
4158 }
4159 let item_point_range = Point::from_ts_point(item_node.start_position())
4160 ..Point::from_ts_point(item_node.end_position());
4161
4162 let mut open_point = None;
4163 let mut close_point = None;
4164
4165 let mut buffer_ranges = Vec::new();
4166 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4167 let mut range = node.start_byte()..node.end_byte();
4168 let start = node.start_position();
4169 if node.end_position().row > start.row {
4170 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4171 }
4172
4173 if !range.is_empty() {
4174 buffer_ranges.push((range, node_is_name));
4175 }
4176 };
4177
4178 for capture in mat.captures {
4179 if capture.index == config.name_capture_ix {
4180 add_to_buffer_ranges(capture.node, true);
4181 } else if Some(capture.index) == config.context_capture_ix
4182 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4183 {
4184 add_to_buffer_ranges(capture.node, false);
4185 } else {
4186 if Some(capture.index) == config.open_capture_ix {
4187 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4188 } else if Some(capture.index) == config.close_capture_ix {
4189 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4190 }
4191 }
4192 }
4193
4194 if buffer_ranges.is_empty() {
4195 return None;
4196 }
4197 let source_range_for_text =
4198 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4199
4200 let mut text = String::new();
4201 let mut highlight_ranges = Vec::new();
4202 let mut name_ranges = Vec::new();
4203 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4204 let mut last_buffer_range_end = 0;
4205 for (buffer_range, is_name) in buffer_ranges {
4206 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4207 if space_added {
4208 text.push(' ');
4209 }
4210 let before_append_len = text.len();
4211 let mut offset = buffer_range.start;
4212 chunks.seek(buffer_range.clone());
4213 for mut chunk in chunks.by_ref() {
4214 if chunk.text.len() > buffer_range.end - offset {
4215 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4216 offset = buffer_range.end;
4217 } else {
4218 offset += chunk.text.len();
4219 }
4220 let style = chunk
4221 .syntax_highlight_id
4222 .zip(theme)
4223 .and_then(|(highlight, theme)| highlight.style(theme));
4224 if let Some(style) = style {
4225 let start = text.len();
4226 let end = start + chunk.text.len();
4227 highlight_ranges.push((start..end, style));
4228 }
4229 text.push_str(chunk.text);
4230 if offset >= buffer_range.end {
4231 break;
4232 }
4233 }
4234 if is_name {
4235 let after_append_len = text.len();
4236 let start = if space_added && !name_ranges.is_empty() {
4237 before_append_len - 1
4238 } else {
4239 before_append_len
4240 };
4241 name_ranges.push(start..after_append_len);
4242 }
4243 last_buffer_range_end = buffer_range.end;
4244 }
4245
4246 Some(OutlineItem {
4247 depth: 0, // We'll calculate the depth later
4248 range: item_point_range,
4249 source_range_for_text: source_range_for_text.to_point(self),
4250 text,
4251 highlight_ranges,
4252 name_ranges,
4253 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4254 annotation_range: None,
4255 })
4256 }
4257
4258 pub fn function_body_fold_ranges<T: ToOffset>(
4259 &self,
4260 within: Range<T>,
4261 ) -> impl Iterator<Item = Range<usize>> + '_ {
4262 self.text_object_ranges(within, TreeSitterOptions::default())
4263 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4264 }
4265
4266 /// For each grammar in the language, runs the provided
4267 /// [`tree_sitter::Query`] against the given range.
4268 pub fn matches(
4269 &self,
4270 range: Range<usize>,
4271 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4272 ) -> SyntaxMapMatches<'_> {
4273 self.syntax.matches(range, self, query)
4274 }
4275
4276 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4277 /// Hence, it may return more bracket pairs than the range itself contains.
4278 ///
4279 /// Chunks present in `known_chunks` are omitted, unless the chunk data has changed since the provided version.
4280 /// The resulting bracket match collections are not ordered.
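///
/// A minimal usage sketch (not compiled as a doc-test; `snapshot` and `range` are assumed
/// to exist):
///
/// ```ignore
/// for (row_range, brackets) in snapshot.fetch_bracket_ranges(range, None) {
///     for bracket in &brackets {
///         // `bracket.open_range` and `bracket.close_range` are byte ranges.
///     }
///     let _ = row_range;
/// }
/// ```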
4281 pub fn fetch_bracket_ranges(
4282 &self,
4283 range: Range<usize>,
4284 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4285 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4286 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4287
4288 let known_chunks = match known_chunks {
4289 Some((known_version, known_chunks)) => {
4290 if !tree_sitter_data
4291 .chunks
4292 .version()
4293 .changed_since(known_version)
4294 {
4295 known_chunks.clone()
4296 } else {
4297 HashSet::default()
4298 }
4299 }
4300 None => HashSet::default(),
4301 };
4302
4303 let mut new_bracket_matches = HashMap::default();
4304 let mut all_bracket_matches = HashMap::default();
4305
4306 for chunk in tree_sitter_data
4307 .chunks
4308 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4309 {
4310 if known_chunks.contains(&chunk.row_range()) {
4311 continue;
4312 }
4313 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4314 continue;
4315 };
4316 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4317
4318 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4319 Some(cached_brackets) => cached_brackets,
4320 None => {
4321 let mut all_brackets = Vec::new();
4322 let mut opens = Vec::new();
4323 let mut color_pairs = Vec::new();
4324
4325 let mut matches =
4326 self.syntax
4327 .matches(chunk_range.clone(), &self.text, |grammar| {
4328 grammar.brackets_config.as_ref().map(|c| &c.query)
4329 });
4330 let configs = matches
4331 .grammars()
4332 .iter()
4333 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4334 .collect::<Vec<_>>();
4335
4336 while let Some(mat) = matches.peek() {
4337 let mut open = None;
4338 let mut close = None;
4339 let syntax_layer_depth = mat.depth;
4340 let config = configs[mat.grammar_index];
4341 let pattern = &config.patterns[mat.pattern_index];
4342 for capture in mat.captures {
4343 if capture.index == config.open_capture_ix {
4344 open = Some(capture.node.byte_range());
4345 } else if capture.index == config.close_capture_ix {
4346 close = Some(capture.node.byte_range());
4347 }
4348 }
4349
4350 matches.advance();
4351
4352 let Some((open_range, close_range)) = open.zip(close) else {
4353 continue;
4354 };
4355
4356 let bracket_range = open_range.start..=close_range.end;
4357 if !bracket_range.overlaps(&chunk_range) {
4358 continue;
4359 }
4360
4361 let index = all_brackets.len();
4362 all_brackets.push(BracketMatch {
4363 open_range: open_range.clone(),
4364 close_range: close_range.clone(),
4365 newline_only: pattern.newline_only,
4366 syntax_layer_depth,
4367 color_index: None,
4368 });
4369
4370 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such a
4371 // bracket pair can match the entire tag with all of the text inside it.
4372 // For now, avoid coloring any pair where both brackets are longer than a single character.
4373 // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4374 let should_color = !pattern.rainbow_exclude
4375 && (open_range.len() == 1 || close_range.len() == 1);
4376 if should_color {
4377 opens.push(open_range.clone());
4378 color_pairs.push((open_range, close_range, index));
4379 }
4380 }
4381
4382 opens.sort_by_key(|r| (r.start, r.end));
4383 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4384 color_pairs.sort_by_key(|(_, close, _)| close.end);
4385
4386 let mut open_stack = Vec::new();
4387 let mut open_index = 0;
4388 for (open, close, index) in color_pairs {
4389 while open_index < opens.len() && opens[open_index].start < close.start {
4390 open_stack.push(opens[open_index].clone());
4391 open_index += 1;
4392 }
4393
4394 if open_stack.last() == Some(&open) {
4395 let depth_index = open_stack.len() - 1;
4396 all_brackets[index].color_index = Some(depth_index);
4397 open_stack.pop();
4398 }
4399 }
4400
4401 all_brackets.sort_by_key(|bracket_match| {
4402 (bracket_match.open_range.start, bracket_match.open_range.end)
4403 });
4404 new_bracket_matches.insert(chunk.id, all_brackets.clone());
4405 all_brackets
4406 }
4407 };
4408 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4409 }
4410
4411 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4412 if latest_tree_sitter_data.chunks.version() == &self.version {
4413 for (chunk_id, new_matches) in new_bracket_matches {
4414 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4415 if old_chunks.is_none() {
4416 *old_chunks = Some(new_matches);
4417 }
4418 }
4419 }
4420
4421 all_bracket_matches
4422 }
4423
4424 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4425 let mut tree_sitter_data = self.tree_sitter_data.lock();
4426 if self
4427 .version
4428 .changed_since(tree_sitter_data.chunks.version())
4429 {
4430 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4431 }
4432 tree_sitter_data
4433 }
4434
4435 pub fn all_bracket_ranges(
4436 &self,
4437 range: Range<usize>,
4438 ) -> impl Iterator<Item = BracketMatch<usize>> {
4439 self.fetch_bracket_ranges(range.clone(), None)
4440 .into_values()
4441 .flatten()
4442 .filter(move |bracket_match| {
4443 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4444 bracket_range.overlaps(&range)
4445 })
4446 }
4447
4448 /// Returns bracket range pairs overlapping or adjacent to `range`.
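///
/// For example (an illustrative sketch, not compiled as a doc-test), finding the bracket
/// pairs around a cursor offset:
///
/// ```ignore
/// for pair in snapshot.bracket_ranges(cursor_offset..cursor_offset) {
///     // `pair.open_range` and `pair.close_range` are the byte ranges of the two brackets.
/// }
/// ```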
4449 pub fn bracket_ranges<T: ToOffset>(
4450 &self,
4451 range: Range<T>,
4452 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4453 // Find bracket pairs that *inclusively* contain the given range.
4454 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4455 self.all_bracket_ranges(range)
4456 .filter(|pair| !pair.newline_only)
4457 }
4458
4459 pub fn debug_variables_query<T: ToOffset>(
4460 &self,
4461 range: Range<T>,
4462 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4463 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4464
4465 let mut matches = self.syntax.matches_with_options(
4466 range.clone(),
4467 &self.text,
4468 TreeSitterOptions::default(),
4469 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4470 );
4471
4472 let configs = matches
4473 .grammars()
4474 .iter()
4475 .map(|grammar| grammar.debug_variables_config.as_ref())
4476 .collect::<Vec<_>>();
4477
4478 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4479
4480 iter::from_fn(move || {
4481 loop {
4482 while let Some(capture) = captures.pop() {
4483 if capture.0.overlaps(&range) {
4484 return Some(capture);
4485 }
4486 }
4487
4488 let mat = matches.peek()?;
4489
4490 let Some(config) = configs[mat.grammar_index].as_ref() else {
4491 matches.advance();
4492 continue;
4493 };
4494
4495 for capture in mat.captures {
4496 let Some(ix) = config
4497 .objects_by_capture_ix
4498 .binary_search_by_key(&capture.index, |e| e.0)
4499 .ok()
4500 else {
4501 continue;
4502 };
4503 let text_object = config.objects_by_capture_ix[ix].1;
4504 let byte_range = capture.node.byte_range();
4505
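// Merge this capture's range into any existing capture of the same object,
// so each object yields a single combined range per match.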
4506 let mut found = false;
4507 for (range, existing) in captures.iter_mut() {
4508 if existing == &text_object {
4509 range.start = range.start.min(byte_range.start);
4510 range.end = range.end.max(byte_range.end);
4511 found = true;
4512 break;
4513 }
4514 }
4515
4516 if !found {
4517 captures.push((byte_range, text_object));
4518 }
4519 }
4520
4521 matches.advance();
4522 }
4523 })
4524 }
4525
4526 pub fn text_object_ranges<T: ToOffset>(
4527 &self,
4528 range: Range<T>,
4529 options: TreeSitterOptions,
4530 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4531 let range =
4532 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4533
4534 let mut matches =
4535 self.syntax
4536 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4537 grammar.text_object_config.as_ref().map(|c| &c.query)
4538 });
4539
4540 let configs = matches
4541 .grammars()
4542 .iter()
4543 .map(|grammar| grammar.text_object_config.as_ref())
4544 .collect::<Vec<_>>();
4545
4546 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4547
4548 iter::from_fn(move || {
4549 loop {
4550 while let Some(capture) = captures.pop() {
4551 if capture.0.overlaps(&range) {
4552 return Some(capture);
4553 }
4554 }
4555
4556 let mat = matches.peek()?;
4557
4558 let Some(config) = configs[mat.grammar_index].as_ref() else {
4559 matches.advance();
4560 continue;
4561 };
4562
4563 for capture in mat.captures {
4564 let Some(ix) = config
4565 .text_objects_by_capture_ix
4566 .binary_search_by_key(&capture.index, |e| e.0)
4567 .ok()
4568 else {
4569 continue;
4570 };
4571 let text_object = config.text_objects_by_capture_ix[ix].1;
4572 let byte_range = capture.node.byte_range();
4573
4574 let mut found = false;
4575 for (range, existing) in captures.iter_mut() {
4576 if existing == &text_object {
4577 range.start = range.start.min(byte_range.start);
4578 range.end = range.end.max(byte_range.end);
4579 found = true;
4580 break;
4581 }
4582 }
4583
4584 if !found {
4585 captures.push((byte_range, text_object));
4586 }
4587 }
4588
4589 matches.advance();
4590 }
4591 })
4592 }
4593
4594 /// Returns the enclosing bracket ranges containing the given range.
4595 pub fn enclosing_bracket_ranges<T: ToOffset>(
4596 &self,
4597 range: Range<T>,
4598 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4599 let range = range.start.to_offset(self)..range.end.to_offset(self);
4600
4601 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4602 let max_depth = result
4603 .iter()
4604 .map(|mat| mat.syntax_layer_depth)
4605 .max()
4606 .unwrap_or(0);
4607 result.into_iter().filter(move |pair| {
4608 pair.open_range.start <= range.start
4609 && pair.close_range.end >= range.end
4610 && pair.syntax_layer_depth == max_depth
4611 })
4612 }
4613
4614 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4615 ///
4616 /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
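///
/// For example (an illustrative sketch, not compiled as a doc-test), finding the innermost
/// pair whose brackets are single characters:
///
/// ```ignore
/// let innermost = snapshot.innermost_enclosing_bracket_ranges(
///     selection_range,
///     Some(&|open, close| open.len() == 1 && close.len() == 1),
/// );
/// if let Some((open_range, close_range)) = innermost {
///     // ...
/// }
/// ```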
4617 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4618 &self,
4619 range: Range<T>,
4620 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4621 ) -> Option<(Range<usize>, Range<usize>)> {
4622 let range = range.start.to_offset(self)..range.end.to_offset(self);
4623
4624 // Get the ranges of the innermost pair of brackets.
4625 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4626
4627 for pair in self.enclosing_bracket_ranges(range) {
4628 if let Some(range_filter) = range_filter
4629 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4630 {
4631 continue;
4632 }
4633
4634 let len = pair.close_range.end - pair.open_range.start;
4635
4636 if let Some((existing_open, existing_close)) = &result {
4637 let existing_len = existing_close.end - existing_open.start;
4638 if len > existing_len {
4639 continue;
4640 }
4641 }
4642
4643 result = Some((pair.open_range, pair.close_range));
4644 }
4645
4646 result
4647 }
4648
4649 /// Returns anchor ranges for any matches of the redaction query.
4650 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4651 /// will be run on the relevant section of the buffer.
4652 pub fn redacted_ranges<T: ToOffset>(
4653 &self,
4654 range: Range<T>,
4655 ) -> impl Iterator<Item = Range<usize>> + '_ {
4656 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4657 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4658 grammar
4659 .redactions_config
4660 .as_ref()
4661 .map(|config| &config.query)
4662 });
4663
4664 let configs = syntax_matches
4665 .grammars()
4666 .iter()
4667 .map(|grammar| grammar.redactions_config.as_ref())
4668 .collect::<Vec<_>>();
4669
4670 iter::from_fn(move || {
4671 let redacted_range = syntax_matches
4672 .peek()
4673 .and_then(|mat| {
4674 configs[mat.grammar_index].and_then(|config| {
4675 mat.captures
4676 .iter()
4677 .find(|capture| capture.index == config.redaction_capture_ix)
4678 })
4679 })
4680 .map(|mat| mat.node.byte_range());
4681 syntax_matches.advance();
4682 redacted_range
4683 })
4684 }
4685
4686 pub fn injections_intersecting_range<T: ToOffset>(
4687 &self,
4688 range: Range<T>,
4689 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4690 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4691
4692 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4693 grammar
4694 .injection_config
4695 .as_ref()
4696 .map(|config| &config.query)
4697 });
4698
4699 let configs = syntax_matches
4700 .grammars()
4701 .iter()
4702 .map(|grammar| grammar.injection_config.as_ref())
4703 .collect::<Vec<_>>();
4704
4705 iter::from_fn(move || {
4706 let ranges = syntax_matches.peek().and_then(|mat| {
4707 let config = &configs[mat.grammar_index]?;
4708 let content_capture_range = mat.captures.iter().find_map(|capture| {
4709 if capture.index == config.content_capture_ix {
4710 Some(capture.node.byte_range())
4711 } else {
4712 None
4713 }
4714 })?;
4715 let language = self.language_at(content_capture_range.start)?;
4716 Some((content_capture_range, language))
4717 });
4718 syntax_matches.advance();
4719 ranges
4720 })
4721 }
4722
4723 pub fn runnable_ranges(
4724 &self,
4725 offset_range: Range<usize>,
4726 ) -> impl Iterator<Item = RunnableRange> + '_ {
4727 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4728 grammar.runnable_config.as_ref().map(|config| &config.query)
4729 });
4730
4731 let test_configs = syntax_matches
4732 .grammars()
4733 .iter()
4734 .map(|grammar| grammar.runnable_config.as_ref())
4735 .collect::<Vec<_>>();
4736
4737 iter::from_fn(move || {
4738 loop {
4739 let mat = syntax_matches.peek()?;
4740
4741 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4742 let mut run_range = None;
4743 let full_range = mat.captures.iter().fold(
4744 Range {
4745 start: usize::MAX,
4746 end: 0,
4747 },
4748 |mut acc, next| {
4749 let byte_range = next.node.byte_range();
4750 if acc.start > byte_range.start {
4751 acc.start = byte_range.start;
4752 }
4753 if acc.end < byte_range.end {
4754 acc.end = byte_range.end;
4755 }
4756 acc
4757 },
4758 );
4759 if full_range.start > full_range.end {
4760 // We did not find a full spanning range of this match.
4761 return None;
4762 }
4763 let extra_captures: SmallVec<[_; 1]> =
4764 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4765 test_configs
4766 .extra_captures
4767 .get(capture.index as usize)
4768 .cloned()
4769 .and_then(|tag_name| match tag_name {
4770 RunnableCapture::Named(name) => {
4771 Some((capture.node.byte_range(), name))
4772 }
4773 RunnableCapture::Run => {
4774 let _ = run_range.insert(capture.node.byte_range());
4775 None
4776 }
4777 })
4778 }));
4779 let run_range = run_range?;
4780 let tags = test_configs
4781 .query
4782 .property_settings(mat.pattern_index)
4783 .iter()
4784 .filter_map(|property| {
4785 if *property.key == *"tag" {
4786 property
4787 .value
4788 .as_ref()
4789 .map(|value| RunnableTag(value.to_string().into()))
4790 } else {
4791 None
4792 }
4793 })
4794 .collect();
4795 let extra_captures = extra_captures
4796 .into_iter()
4797 .map(|(range, name)| {
4798 (
4799 name.to_string(),
4800 self.text_for_range(range).collect::<String>(),
4801 )
4802 })
4803 .collect();
4804 // All tags should have the same range.
4805 Some(RunnableRange {
4806 run_range,
4807 full_range,
4808 runnable: Runnable {
4809 tags,
4810 language: mat.language,
4811 buffer: self.remote_id(),
4812 },
4813 extra_captures,
4814 buffer_id: self.remote_id(),
4815 })
4816 });
4817
4818 syntax_matches.advance();
4819 if test_range.is_some() {
4820 // It's fine to short-circuit when .peek()? returns None. But we don't want to return None from this iterator just because
4821 // a match did not contain a run marker, so in that case we simply loop around and try the next match.
4822 return test_range;
4823 }
4824 }
4825 })
4826 }
4827
4828 /// Returns selections of peers that intersect the given range, optionally including the local replica's selections.
4829 #[allow(clippy::type_complexity)]
4830 pub fn selections_in_range(
4831 &self,
4832 range: Range<Anchor>,
4833 include_local: bool,
4834 ) -> impl Iterator<
4835 Item = (
4836 ReplicaId,
4837 bool,
4838 CursorShape,
4839 impl Iterator<Item = &Selection<Anchor>> + '_,
4840 ),
4841 > + '_ {
4842 self.remote_selections
4843 .iter()
4844 .filter(move |(replica_id, set)| {
4845 (include_local || **replica_id != self.text.replica_id())
4846 && !set.selections.is_empty()
4847 })
4848 .map(move |(replica_id, set)| {
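// Binary-search for the sub-slice of selections that intersect `range`:
// `start_ix` is the first selection whose end is at or after `range.start`, and
// `end_ix` is one past the last selection whose start is at or before `range.end`.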
4849 let start_ix = match set.selections.binary_search_by(|probe| {
4850 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4851 }) {
4852 Ok(ix) | Err(ix) => ix,
4853 };
4854 let end_ix = match set.selections.binary_search_by(|probe| {
4855 probe.start.cmp(&range.end, self).then(Ordering::Less)
4856 }) {
4857 Ok(ix) | Err(ix) => ix,
4858 };
4859
4860 (
4861 *replica_id,
4862 set.line_mode,
4863 set.cursor_shape,
4864 set.selections[start_ix..end_ix].iter(),
4865 )
4866 })
4867 }
4868
4869 /// Returns whether the buffer contains any diagnostics.
4870 pub fn has_diagnostics(&self) -> bool {
4871 !self.diagnostics.is_empty()
4872 }
4873
4874 /// Returns all the diagnostics intersecting the given range.
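///
/// For example (an illustrative sketch, not compiled as a doc-test), iterating the
/// diagnostics of the whole buffer as offset ranges:
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     let severity = entry.diagnostic.severity;
///     let offset_range = entry.range.clone();
///     // ...
/// }
/// ```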
4875 pub fn diagnostics_in_range<'a, T, O>(
4876 &'a self,
4877 search_range: Range<T>,
4878 reversed: bool,
4879 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4880 where
4881 T: 'a + Clone + ToOffset,
4882 O: 'a + FromAnchor,
4883 {
4884 let mut iterators: Vec<_> = self
4885 .diagnostics
4886 .iter()
4887 .map(|(_, collection)| {
4888 collection
4889 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4890 .peekable()
4891 })
4892 .collect();
4893
4894 std::iter::from_fn(move || {
4895 let (next_ix, _) = iterators
4896 .iter_mut()
4897 .enumerate()
4898 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4899 .min_by(|(_, a), (_, b)| {
4900 let cmp = a
4901 .range
4902 .start
4903 .cmp(&b.range.start, self)
4904 // when range is equal, sort by diagnostic severity
4905 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4906 // and stabilize order with group_id
4907 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4908 if reversed { cmp.reverse() } else { cmp }
4909 })?;
4910 iterators[next_ix]
4911 .next()
4912 .map(
4913 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4914 diagnostic,
4915 range: FromAnchor::from_anchor(&range.start, self)
4916 ..FromAnchor::from_anchor(&range.end, self),
4917 },
4918 )
4919 })
4920 }
4921
4922 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4923 /// should be used instead.
4924 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4925 &self.diagnostics
4926 }
4927
4928 /// Returns all the diagnostic groups associated with the given
4929 /// language server ID. If no language server ID is provided,
4930 /// all diagnostics groups are returned.
4931 pub fn diagnostic_groups(
4932 &self,
4933 language_server_id: Option<LanguageServerId>,
4934 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4935 let mut groups = Vec::new();
4936
4937 if let Some(language_server_id) = language_server_id {
4938 if let Ok(ix) = self
4939 .diagnostics
4940 .binary_search_by_key(&language_server_id, |e| e.0)
4941 {
4942 self.diagnostics[ix]
4943 .1
4944 .groups(language_server_id, &mut groups, self);
4945 }
4946 } else {
4947 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4948 diagnostics.groups(*language_server_id, &mut groups, self);
4949 }
4950 }
4951
4952 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4953 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4954 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4955 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4956 });
4957
4958 groups
4959 }
4960
4961 /// Returns an iterator over the diagnostics for the given group.
4962 pub fn diagnostic_group<O>(
4963 &self,
4964 group_id: usize,
4965 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4966 where
4967 O: FromAnchor + 'static,
4968 {
4969 self.diagnostics
4970 .iter()
4971 .flat_map(move |(_, set)| set.group(group_id, self))
4972 }
4973
4974 /// An integer version number that accounts for all updates besides
4975 /// the buffer's text itself (which is versioned via a version vector).
4976 pub fn non_text_state_update_count(&self) -> usize {
4977 self.non_text_state_update_count
4978 }
4979
4980 /// An integer version that changes when the buffer's syntax changes.
4981 pub fn syntax_update_count(&self) -> usize {
4982 self.syntax.update_count()
4983 }
4984
4985 /// Returns a snapshot of the underlying file.
4986 pub fn file(&self) -> Option<&Arc<dyn File>> {
4987 self.file.as_ref()
4988 }
4989
4990 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4991 if let Some(file) = self.file() {
4992 if file.path().file_name().is_none() || include_root {
4993 Some(file.full_path(cx).to_string_lossy().into_owned())
4994 } else {
4995 Some(file.path().display(file.path_style(cx)).to_string())
4996 }
4997 } else {
4998 None
4999 }
5000 }
5001
5002 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5003 let query_str = query.fuzzy_contents;
5004 if query_str.is_some_and(|query| query.is_empty()) {
5005 return BTreeMap::default();
5006 }
5007
5008 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5009 language,
5010 override_id: None,
5011 }));
5012
5013 let mut query_ix = 0;
5014 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5015 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5016
5017 let mut words = BTreeMap::default();
5018 let mut current_word_start_ix = None;
5019 let mut chunk_ix = query.range.start;
5020 for chunk in self.chunks(query.range, false) {
5021 for (i, c) in chunk.text.char_indices() {
5022 let ix = chunk_ix + i;
5023 if classifier.is_word(c) {
5024 if current_word_start_ix.is_none() {
5025 current_word_start_ix = Some(ix);
5026 }
5027
5028 if let Some(query_chars) = &query_chars
5029 && query_ix < query_len
5030 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5031 {
5032 query_ix += 1;
5033 }
5034 continue;
5035 } else if let Some(word_start) = current_word_start_ix.take()
5036 && query_ix == query_len
5037 {
5038 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5039 let mut word_text = self.text_for_range(word_start..ix).peekable();
5040 let first_char = word_text
5041 .peek()
5042 .and_then(|first_chunk| first_chunk.chars().next());
5043 // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
5044 if !query.skip_digits
5045 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5046 {
5047 words.insert(word_text.collect(), word_range);
5048 }
5049 }
5050 query_ix = 0;
5051 }
5052 chunk_ix += chunk.text.len();
5053 }
5054
5055 words
5056 }
5057}
5058
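/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// For example (an illustrative sketch, not compiled as a doc-test; `snapshot` is assumed
/// to be a `BufferSnapshot`):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, anchor_range) in words {
///     // `word` is the matched text; `anchor_range` covers it in the buffer.
/// }
/// ```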
5059pub struct WordsQuery<'a> {
5060 /// Only return words that contain all of the characters of this fuzzy string, in order (case-insensitive).
5061 pub fuzzy_contents: Option<&'a str>,
5062 /// Skips words that start with a digit.
5063 pub skip_digits: bool,
5064 /// The buffer offset range in which to look for words.
5065 pub range: Range<usize>,
5066}
5067
5068fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5069 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5070}
5071
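/// Computes the indentation implied by the leading whitespace of the given characters:
/// `len` counts the leading space/tab characters, and `kind` is taken from the first of
/// them. For example, `indent_size_for_text("\t\tfoo".chars())` yields `len == 2` with
/// `kind == IndentKind::Tab`.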
5072fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5073 let mut result = IndentSize::spaces(0);
5074 for c in text {
5075 let kind = match c {
5076 ' ' => IndentKind::Space,
5077 '\t' => IndentKind::Tab,
5078 _ => break,
5079 };
5080 if result.len == 0 {
5081 result.kind = kind;
5082 }
5083 result.len += 1;
5084 }
5085 result
5086}
5087
5088impl Clone for BufferSnapshot {
5089 fn clone(&self) -> Self {
5090 Self {
5091 text: self.text.clone(),
5092 syntax: self.syntax.clone(),
5093 file: self.file.clone(),
5094 remote_selections: self.remote_selections.clone(),
5095 diagnostics: self.diagnostics.clone(),
5096 language: self.language.clone(),
5097 tree_sitter_data: self.tree_sitter_data.clone(),
5098 non_text_state_update_count: self.non_text_state_update_count,
5099 }
5100 }
5101}
5102
5103impl Deref for BufferSnapshot {
5104 type Target = text::BufferSnapshot;
5105
5106 fn deref(&self) -> &Self::Target {
5107 &self.text
5108 }
5109}
5110
5111unsafe impl Send for BufferChunks<'_> {}
5112
5113impl<'a> BufferChunks<'a> {
5114 pub(crate) fn new(
5115 text: &'a Rope,
5116 range: Range<usize>,
5117 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5118 diagnostics: bool,
5119 buffer_snapshot: Option<&'a BufferSnapshot>,
5120 ) -> Self {
5121 let mut highlights = None;
5122 if let Some((captures, highlight_maps)) = syntax {
5123 highlights = Some(BufferChunkHighlights {
5124 captures,
5125 next_capture: None,
5126 stack: Default::default(),
5127 highlight_maps,
5128 })
5129 }
5130
5131 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5132 let chunks = text.chunks_in_range(range.clone());
5133
5134 let mut this = BufferChunks {
5135 range,
5136 buffer_snapshot,
5137 chunks,
5138 diagnostic_endpoints,
5139 error_depth: 0,
5140 warning_depth: 0,
5141 information_depth: 0,
5142 hint_depth: 0,
5143 unnecessary_depth: 0,
5144 underline: true,
5145 highlights,
5146 };
5147 this.initialize_diagnostic_endpoints();
5148 this
5149 }
5150
5151 /// Seeks to the given byte range in the buffer.
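///
/// For example (an illustrative sketch, not compiled as a doc-test), re-targeting an
/// existing `BufferChunks` iterator to a sub-range and draining it:
///
/// ```ignore
/// chunks.seek(10..20);
/// for chunk in chunks.by_ref() {
///     // Only chunks within byte range 10..20 are yielded.
/// }
/// ```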
5152 pub fn seek(&mut self, range: Range<usize>) {
5153 let old_range = std::mem::replace(&mut self.range, range.clone());
5154 self.chunks.set_range(self.range.clone());
5155 if let Some(highlights) = self.highlights.as_mut() {
5156 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5157 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5158 highlights
5159 .stack
5160 .retain(|(end_offset, _)| *end_offset > range.start);
5161 if let Some(capture) = &highlights.next_capture
5162 && range.start >= capture.node.start_byte()
5163 {
5164 let next_capture_end = capture.node.end_byte();
5165 if range.start < next_capture_end {
5166 highlights.stack.push((
5167 next_capture_end,
5168 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5169 ));
5170 }
5171 highlights.next_capture.take();
5172 }
5173 } else if let Some(snapshot) = self.buffer_snapshot {
5174 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5175 *highlights = BufferChunkHighlights {
5176 captures,
5177 next_capture: None,
5178 stack: Default::default(),
5179 highlight_maps,
5180 };
5181 } else {
5182 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5183 // Seeking such BufferChunks is not supported.
5184 debug_assert!(
5185 false,
5186 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5187 );
5188 }
5189
5190 highlights.captures.set_byte_range(self.range.clone());
5191 self.initialize_diagnostic_endpoints();
5192 }
5193 }
5194
5195 fn initialize_diagnostic_endpoints(&mut self) {
5196 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5197 && let Some(buffer) = self.buffer_snapshot
5198 {
5199 let mut diagnostic_endpoints = Vec::new();
5200 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5201 diagnostic_endpoints.push(DiagnosticEndpoint {
5202 offset: entry.range.start,
5203 is_start: true,
5204 severity: entry.diagnostic.severity,
5205 is_unnecessary: entry.diagnostic.is_unnecessary,
5206 underline: entry.diagnostic.underline,
5207 });
5208 diagnostic_endpoints.push(DiagnosticEndpoint {
5209 offset: entry.range.end,
5210 is_start: false,
5211 severity: entry.diagnostic.severity,
5212 is_unnecessary: entry.diagnostic.is_unnecessary,
5213 underline: entry.diagnostic.underline,
5214 });
5215 }
5216 diagnostic_endpoints
5217 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5218 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5219 self.hint_depth = 0;
5220 self.error_depth = 0;
5221 self.warning_depth = 0;
5222 self.information_depth = 0;
5223 }
5224 }
5225
5226 /// The current byte offset in the buffer.
5227 pub fn offset(&self) -> usize {
5228 self.range.start
5229 }
5230
5231 pub fn range(&self) -> Range<usize> {
5232 self.range.clone()
5233 }
5234
5235 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5236 let depth = match endpoint.severity {
5237 DiagnosticSeverity::ERROR => &mut self.error_depth,
5238 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5239 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5240 DiagnosticSeverity::HINT => &mut self.hint_depth,
5241 _ => return,
5242 };
5243 if endpoint.is_start {
5244 *depth += 1;
5245 } else {
5246 *depth -= 1;
5247 }
5248
5249 if endpoint.is_unnecessary {
5250 if endpoint.is_start {
5251 self.unnecessary_depth += 1;
5252 } else {
5253 self.unnecessary_depth -= 1;
5254 }
5255 }
5256 }
5257
5258 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5259 if self.error_depth > 0 {
5260 Some(DiagnosticSeverity::ERROR)
5261 } else if self.warning_depth > 0 {
5262 Some(DiagnosticSeverity::WARNING)
5263 } else if self.information_depth > 0 {
5264 Some(DiagnosticSeverity::INFORMATION)
5265 } else if self.hint_depth > 0 {
5266 Some(DiagnosticSeverity::HINT)
5267 } else {
5268 None
5269 }
5270 }
5271
5272 fn current_code_is_unnecessary(&self) -> bool {
5273 self.unnecessary_depth > 0
5274 }
5275}
5276
5277impl<'a> Iterator for BufferChunks<'a> {
5278 type Item = Chunk<'a>;
5279
5280 fn next(&mut self) -> Option<Self::Item> {
5281 let mut next_capture_start = usize::MAX;
5282 let mut next_diagnostic_endpoint = usize::MAX;
5283
5284 if let Some(highlights) = self.highlights.as_mut() {
5285 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5286 if *parent_capture_end <= self.range.start {
5287 highlights.stack.pop();
5288 } else {
5289 break;
5290 }
5291 }
5292
5293 if highlights.next_capture.is_none() {
5294 highlights.next_capture = highlights.captures.next();
5295 }
5296
5297 while let Some(capture) = highlights.next_capture.as_ref() {
5298 if self.range.start < capture.node.start_byte() {
5299 next_capture_start = capture.node.start_byte();
5300 break;
5301 } else {
5302 let highlight_id =
5303 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5304 highlights
5305 .stack
5306 .push((capture.node.end_byte(), highlight_id));
5307 highlights.next_capture = highlights.captures.next();
5308 }
5309 }
5310 }
5311
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

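        // Emit the next slice of text, clipped so that a single chunk never
        // spans a syntax capture boundary or a diagnostic endpoint.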
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
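            // `tabs` and `chars_map` are bitmaps over the bytes of the whole
            // underlying chunk; shift them so bit 0 lines up with the start of
            // the emitted slice and mask off bits at or beyond `bit_end`.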
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// or grown by the given size in the given direction. If the two indents
    /// use different kinds (spaces vs. tabs), the length is left unchanged,
    /// except that growing an empty indent adopts the other indent entirely.
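    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest); all items shown are
    /// defined in this module:
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Shrinking four spaces by two spaces leaves two spaces.
    /// let shrunk = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(shrunk.len, 2);
    ///
    /// // Growing an empty indent adopts the other indent's kind and length.
    /// let grown = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
    /// assert_eq!(grown.len, 1);
    /// ```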
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

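    /// The number of columns this indent occupies when tabs are expanded to
    /// `tab_size` columns; e.g. two tabs with a tab size of 4 occupy 8 columns.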
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

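/// Coalesces an iterator of row numbers into contiguous ranges, starting a new
/// range whenever a value is not adjacent to the end of the current range or
/// the current range has reached `max_len`.
///
/// # Examples
///
/// Illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 7, 8].into_iter(), 3).collect();
/// assert_eq!(ranges, vec![1..4, 5..8, 8..9]);
/// ```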
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

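/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a [`LanguageScope`] for language-specific extra word
/// characters and the context in which the classification is made.
///
/// # Examples
///
/// Illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
///
/// // With `ignore_punctuation`, punctuation characters count as word characters.
/// let classifier = classifier.ignore_punctuation(true);
/// assert_eq!(classifier.kind('-'), CharKind::Word);
/// ```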
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

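    /// Classifies `c`, using the given `ignore_punctuation` value in place of
    /// the classifier's own setting. Alphanumerics and `_` are always word
    /// characters; the language scope may contribute additional word characters
    /// depending on the configured [`CharScopeContext`].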
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
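///
/// # Examples
///
/// Illustrative sketch (not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("foo \nbar\t\t\nbaz");
/// // The trailing space after "foo" and the two tabs after "bar".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..4, 8..10]);
/// ```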
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}