1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] can be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
    /// The file backing this buffer, if any; `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// The ID provided by the dynamic registration that produced this diagnostic.
249 pub registration_id: Option<SharedString>,
250 /// A machine-readable code that identifies this diagnostic.
251 pub code: Option<NumberOrString>,
    /// A URL with more information about this diagnostic's code, if provided.
    pub code_description: Option<lsp::Uri>,
253 /// Whether this diagnostic is a hint, warning, or error.
254 pub severity: DiagnosticSeverity,
255 /// The human-readable message associated with this diagnostic.
256 pub message: String,
    /// The human-readable message, in Markdown format, if available.
258 pub markdown: Option<String>,
259 /// An id that identifies the group to which this diagnostic belongs.
260 ///
261 /// When a language server produces a diagnostic with
262 /// one or more associated diagnostics, those diagnostics are all
263 /// assigned a single group ID.
264 pub group_id: usize,
265 /// Whether this diagnostic is the primary diagnostic for its group.
266 ///
267 /// In a given group, the primary diagnostic is the top-level diagnostic
268 /// returned by the language server. The non-primary diagnostics are the
269 /// associated diagnostics.
270 pub is_primary: bool,
271 /// Whether this diagnostic is considered to originate from an analysis of
272 /// files on disk, as opposed to any unsaved buffer contents. This is a
273 /// property of a given diagnostic source, and is configured for a given
274 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
275 /// for the language server.
276 pub is_disk_based: bool,
277 /// Whether this diagnostic marks unnecessary code.
278 pub is_unnecessary: bool,
    /// A coarse classification of this diagnostic by how it was produced (pulled, pushed, or other).
280 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
282 pub data: Option<Value>,
283 /// Whether to underline the corresponding text range in the editor.
284 pub underline: bool,
285}
286
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
288pub enum DiagnosticSourceKind {
289 Pulled,
290 Pushed,
291 Other,
292}
293
294/// An operation used to synchronize this buffer with its other replicas.
295#[derive(Clone, Debug, PartialEq)]
296pub enum Operation {
297 /// A text operation.
298 Buffer(text::Operation),
299
300 /// An update to the buffer's diagnostics.
301 UpdateDiagnostics {
302 /// The id of the language server that produced the new diagnostics.
303 server_id: LanguageServerId,
304 /// The diagnostics.
305 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 },
309
310 /// An update to the most recent selections in this buffer.
311 UpdateSelections {
312 /// The selections.
313 selections: Arc<[Selection<Anchor>]>,
314 /// The buffer's lamport timestamp.
315 lamport_timestamp: clock::Lamport,
316 /// Whether the selections are in 'line mode'.
317 line_mode: bool,
318 /// The [`CursorShape`] associated with these selections.
319 cursor_shape: CursorShape,
320 },
321
322 /// An update to the characters that should trigger autocompletion
323 /// for this buffer.
324 UpdateCompletionTriggers {
325 /// The characters that trigger autocompletion.
326 triggers: Vec<String>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// The language server ID.
330 server_id: LanguageServerId,
331 },
332
333 /// An update to the line ending type of this buffer.
334 UpdateLineEnding {
335 /// The line ending type.
336 line_ending: LineEnding,
337 /// The buffer's lamport timestamp.
338 lamport_timestamp: clock::Lamport,
339 },
340}
341
342/// An event that occurs in a buffer.
343#[derive(Clone, Debug, PartialEq)]
344pub enum BufferEvent {
345 /// The buffer was changed in a way that must be
346 /// propagated to its other replicas.
347 Operation {
348 operation: Operation,
349 is_local: bool,
350 },
351 /// The buffer was edited.
352 Edited,
353 /// The buffer's `dirty` bit changed.
354 DirtyChanged,
355 /// The buffer was saved.
356 Saved,
357 /// The buffer's file was changed on disk.
358 FileHandleChanged,
359 /// The buffer was reloaded.
360 Reloaded,
    /// The buffer needs to be reloaded.
362 ReloadNeeded,
363 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer previously had no language and now has one.
365 LanguageChanged(bool),
366 /// The buffer's syntax trees were updated.
367 Reparsed,
368 /// The buffer's diagnostics were updated.
369 DiagnosticsUpdated,
370 /// The buffer gained or lost editing capabilities.
371 CapabilityChanged,
372}
373
374/// The file associated with a buffer.
375pub trait File: Send + Sync + Any {
376 /// Returns the [`LocalFile`] associated with this file, if the
377 /// file is local.
378 fn as_local(&self) -> Option<&dyn LocalFile>;
379
380 /// Returns whether this file is local.
381 fn is_local(&self) -> bool {
382 self.as_local().is_some()
383 }
384
385 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
386 /// only available in some states, such as modification time.
387 fn disk_state(&self) -> DiskState;
388
389 /// Returns the path of this file relative to the worktree's root directory.
390 fn path(&self) -> &Arc<RelPath>;
391
392 /// Returns the path of this file relative to the worktree's parent directory (this means it
393 /// includes the name of the worktree's root folder).
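    ///
    /// An illustrative sketch (not compiled as a doctest); `file`, `cx`, and the
    /// folder and file names here are assumed:
    ///
    /// ```ignore
    /// // For a worktree rooted at a folder named "my-crate" that contains
    /// // "src/lib.rs", `path()` yields "src/lib.rs", while `full_path` also
    /// // includes the root folder's name:
    /// assert_eq!(file.full_path(cx), PathBuf::from("my-crate/src/lib.rs"));
    /// ```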
394 fn full_path(&self, cx: &App) -> PathBuf;
395
396 /// Returns the path style of this file.
397 fn path_style(&self, cx: &App) -> PathStyle;
398
399 /// Returns the last component of this handle's absolute path. If this handle refers to the root
400 /// of its worktree, then this method will return the name of the worktree itself.
401 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
402
403 /// Returns the id of the worktree to which this file belongs.
404 ///
405 /// This is needed for looking up project-specific settings.
406 fn worktree_id(&self, cx: &App) -> WorktreeId;
407
408 /// Converts this file into a protobuf message.
409 fn to_proto(&self, cx: &App) -> rpc::proto::File;
410
    /// Returns whether Zed considers this to be a private file.
412 fn is_private(&self) -> bool;
413}
414
/// The file's storage status: whether it's stored (`Present`) and, if so, when it was
/// last modified. A file that is not stored is either `New` or `Deleted`. The UI
/// distinguishes these two states; for example, the buffer tab does not display a
/// deletion indicator for new files.
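///
/// An illustrative sketch (not compiled as a doctest) of how the states relate to
/// the helpers below; `mtime` stands for some `MTime` value:
///
/// ```ignore
/// assert_eq!(DiskState::New.mtime(), None);
/// assert!(!DiskState::Deleted.exists());
/// assert!(DiskState::Present { mtime }.exists());
/// ```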
419#[derive(Copy, Clone, Debug, PartialEq)]
420pub enum DiskState {
421 /// File created in Zed that has not been saved.
422 New,
423 /// File present on the filesystem.
424 Present { mtime: MTime },
425 /// Deleted file that was previously present.
426 Deleted,
427}
428
429impl DiskState {
430 /// Returns the file's last known modification time on disk.
431 pub fn mtime(self) -> Option<MTime> {
432 match self {
433 DiskState::New => None,
434 DiskState::Present { mtime } => Some(mtime),
435 DiskState::Deleted => None,
436 }
437 }
438
439 pub fn exists(&self) -> bool {
440 match self {
441 DiskState::New => false,
442 DiskState::Present { .. } => true,
443 DiskState::Deleted => false,
444 }
445 }
446}
447
448/// The file associated with a buffer, in the case where the file is on the local disk.
449pub trait LocalFile: File {
    /// Returns the absolute path of this file.
451 fn abs_path(&self, cx: &App) -> PathBuf;
452
453 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
454 fn load(&self, cx: &App) -> Task<Result<String>>;
455
456 /// Loads the file's contents from disk.
457 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
458}
459
460/// The auto-indent behavior associated with an editing operation.
461/// For some editing operations, each affected line of text has its
462/// indentation recomputed. For other operations, the entire block
463/// of edited text is adjusted uniformly.
464#[derive(Clone, Debug)]
465pub enum AutoindentMode {
466 /// Indent each line of inserted text.
467 EachLine,
468 /// Apply the same indentation adjustment to all of the lines
469 /// in a given insertion.
470 Block {
471 /// The original indentation column of the first line of each
472 /// insertion, if it has been copied.
473 ///
474 /// Knowing this makes it possible to preserve the relative indentation
475 /// of every line in the insertion from when it was copied.
476 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by the same `b - a` column delta.
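        ///
        /// A minimal illustrative sketch (not compiled as a doctest), assuming a
        /// single two-line insertion that was copied from indent column 4:
        ///
        /// ```ignore
        /// // If the first line is auto-indented to column 8, the second line is
        /// // shifted right by the same 4-column delta.
        /// let mode = AutoindentMode::Block {
        ///     original_indent_columns: vec![Some(4)],
        /// };
        /// ```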
480 original_indent_columns: Vec<Option<u32>>,
481 },
482}
483
484#[derive(Clone)]
485struct AutoindentRequest {
486 before_edit: BufferSnapshot,
487 entries: Vec<AutoindentRequestEntry>,
488 is_block_mode: bool,
489 ignore_empty_lines: bool,
490}
491
492#[derive(Debug, Clone)]
493struct AutoindentRequestEntry {
494 /// A range of the buffer whose indentation should be adjusted.
495 range: Range<Anchor>,
496 /// Whether or not these lines should be considered brand new, for the
497 /// purpose of auto-indent. When text is not new, its indentation will
498 /// only be adjusted if the suggested indentation level has *changed*
499 /// since the edit was made.
500 first_line_is_new: bool,
501 indent_size: IndentSize,
502 original_indent_column: Option<u32>,
503}
504
505#[derive(Debug)]
506struct IndentSuggestion {
507 basis_row: u32,
508 delta: Ordering,
509 within_error: bool,
510}
511
512struct BufferChunkHighlights<'a> {
513 captures: SyntaxMapCaptures<'a>,
514 next_capture: Option<SyntaxMapCapture<'a>>,
515 stack: Vec<(usize, HighlightId)>,
516 highlight_maps: Vec<HighlightMap>,
517}
518
519/// An iterator that yields chunks of a buffer's text, along with their
520/// syntax highlights and diagnostic status.
521pub struct BufferChunks<'a> {
522 buffer_snapshot: Option<&'a BufferSnapshot>,
523 range: Range<usize>,
524 chunks: text::Chunks<'a>,
525 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
526 error_depth: usize,
527 warning_depth: usize,
528 information_depth: usize,
529 hint_depth: usize,
530 unnecessary_depth: usize,
531 underline: bool,
532 highlights: Option<BufferChunkHighlights<'a>>,
533}
534
535/// A chunk of a buffer's text, along with its syntax highlight and
536/// diagnostic status.
537#[derive(Clone, Debug, Default)]
538pub struct Chunk<'a> {
539 /// The text of the chunk.
540 pub text: &'a str,
541 /// The syntax highlighting style of the chunk.
542 pub syntax_highlight_id: Option<HighlightId>,
543 /// The highlight style that has been applied to this chunk in
544 /// the editor.
545 pub highlight_style: Option<HighlightStyle>,
546 /// The severity of diagnostic associated with this chunk, if any.
547 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitmap of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
552 /// Whether this chunk of text is marked as unnecessary.
553 pub is_unnecessary: bool,
554 /// Whether this chunk of text was originally a tab character.
555 pub is_tab: bool,
556 /// Whether this chunk of text was originally an inlay.
557 pub is_inlay: bool,
558 /// Whether to underline the corresponding text range in the editor.
559 pub underline: bool,
560}
561
562/// A set of edits to a given version of a buffer, computed asynchronously.
563#[derive(Debug)]
564pub struct Diff {
565 pub base_version: clock::Global,
566 pub line_ending: LineEnding,
567 pub edits: Vec<(Range<usize>, Arc<str>)>,
568}
569
570#[derive(Debug, Clone, Copy)]
571pub(crate) struct DiagnosticEndpoint {
572 offset: usize,
573 is_start: bool,
574 underline: bool,
575 severity: DiagnosticSeverity,
576 is_unnecessary: bool,
577}
578
579/// A class of characters, used for characterizing a run of text.
580#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
581pub enum CharKind {
582 /// Whitespace.
583 Whitespace,
584 /// Punctuation.
585 Punctuation,
586 /// Word.
587 Word,
588}
589
590/// Context for character classification within a specific scope.
591#[derive(Copy, Clone, Eq, PartialEq, Debug)]
592pub enum CharScopeContext {
593 /// Character classification for completion queries.
594 ///
595 /// This context treats certain characters as word constituents that would
596 /// normally be considered punctuation, such as '-' in Tailwind classes
597 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
598 Completion,
599 /// Character classification for linked edits.
600 ///
601 /// This context handles characters that should be treated as part of
602 /// identifiers during linked editing operations, such as '.' in JSX
603 /// component names like `<Animated.View>`.
604 LinkedEdit,
605}
606
/// A runnable is the data about a buffer region that can be resolved into a task.
608pub struct Runnable {
609 pub tags: SmallVec<[RunnableTag; 1]>,
610 pub language: Arc<Language>,
611 pub buffer: BufferId,
612}
613
614#[derive(Default, Clone, Debug)]
615pub struct HighlightedText {
616 pub text: SharedString,
617 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
618}
619
620#[derive(Default, Debug)]
621struct HighlightedTextBuilder {
622 pub text: String,
623 highlights: Vec<(Range<usize>, HighlightStyle)>,
624}
625
626impl HighlightedText {
627 pub fn from_buffer_range<T: ToOffset>(
628 range: Range<T>,
629 snapshot: &text::BufferSnapshot,
630 syntax_snapshot: &SyntaxSnapshot,
631 override_style: Option<HighlightStyle>,
632 syntax_theme: &SyntaxTheme,
633 ) -> Self {
634 let mut highlighted_text = HighlightedTextBuilder::default();
635 highlighted_text.add_text_from_buffer_range(
636 range,
637 snapshot,
638 syntax_snapshot,
639 override_style,
640 syntax_theme,
641 );
642 highlighted_text.build()
643 }
644
645 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
646 gpui::StyledText::new(self.text.clone())
647 .with_default_highlights(default_style, self.highlights.iter().cloned())
648 }
649
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether more lines follow.
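    ///
    /// An illustrative sketch (not compiled as a doctest); `highlighted` is an
    /// assumed `HighlightedText` value:
    ///
    /// ```ignore
    /// // "  let x = 1;\nlet y = 2;" previews as "let x = 1;", and the flag
    /// // reports that more lines follow.
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// assert!(has_more);
    /// ```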
652 pub fn first_line_preview(self) -> (Self, bool) {
653 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
654 let first_line = &self.text[..newline_ix];
655
656 // Trim leading whitespace, unless an edit starts prior to it.
657 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
658 if let Some((first_highlight_range, _)) = self.highlights.first() {
659 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
660 }
661
662 let preview_text = &first_line[preview_start_ix..];
663 let preview_highlights = self
664 .highlights
665 .into_iter()
666 .skip_while(|(range, _)| range.end <= preview_start_ix)
667 .take_while(|(range, _)| range.start < newline_ix)
668 .filter_map(|(mut range, highlight)| {
669 range.start = range.start.saturating_sub(preview_start_ix);
670 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
671 if range.is_empty() {
672 None
673 } else {
674 Some((range, highlight))
675 }
676 });
677
678 let preview = Self {
679 text: SharedString::new(preview_text),
680 highlights: preview_highlights.collect(),
681 };
682
683 (preview, self.text.len() > newline_ix)
684 }
685}
686
687impl HighlightedTextBuilder {
688 pub fn build(self) -> HighlightedText {
689 HighlightedText {
690 text: self.text.into(),
691 highlights: self.highlights,
692 }
693 }
694
695 pub fn add_text_from_buffer_range<T: ToOffset>(
696 &mut self,
697 range: Range<T>,
698 snapshot: &text::BufferSnapshot,
699 syntax_snapshot: &SyntaxSnapshot,
700 override_style: Option<HighlightStyle>,
701 syntax_theme: &SyntaxTheme,
702 ) {
703 let range = range.to_offset(snapshot);
704 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
705 let start = self.text.len();
706 self.text.push_str(chunk.text);
707 let end = self.text.len();
708
709 if let Some(highlight_style) = chunk
710 .syntax_highlight_id
711 .and_then(|id| id.style(syntax_theme))
712 {
713 let highlight_style = override_style.map_or(highlight_style, |override_style| {
714 highlight_style.highlight(override_style)
715 });
716 self.highlights.push((start..end, highlight_style));
717 } else if let Some(override_style) = override_style {
718 self.highlights.push((start..end, override_style));
719 }
720 }
721 }
722
723 fn highlighted_chunks<'a>(
724 range: Range<usize>,
725 snapshot: &'a text::BufferSnapshot,
726 syntax_snapshot: &'a SyntaxSnapshot,
727 ) -> BufferChunks<'a> {
728 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
729 grammar
730 .highlights_config
731 .as_ref()
732 .map(|config| &config.query)
733 });
734
735 let highlight_maps = captures
736 .grammars()
737 .iter()
738 .map(|grammar| grammar.highlight_map())
739 .collect();
740
741 BufferChunks::new(
742 snapshot.as_rope(),
743 range,
744 Some((captures, highlight_maps)),
745 false,
746 None,
747 )
748 }
749}
750
751#[derive(Clone)]
752pub struct EditPreview {
753 old_snapshot: text::BufferSnapshot,
754 applied_edits_snapshot: text::BufferSnapshot,
755 syntax_snapshot: SyntaxSnapshot,
756}
757
758impl EditPreview {
759 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first.start.to_point(&self.old_snapshot);
764 let old_end = last.end.to_point(&self.old_snapshot);
765 let new_end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 let start = Point::new(start.row.saturating_sub(3), 0);
771 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
772 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
773
774 Some(unified_diff(
775 &self
776 .old_snapshot
777 .text_for_range(start..old_end)
778 .collect::<String>(),
779 &self
780 .applied_edits_snapshot
781 .text_for_range(start..new_end)
782 .collect::<String>(),
783 ))
784 }
785
786 pub fn highlight_edits(
787 &self,
788 current_snapshot: &BufferSnapshot,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 include_deletions: bool,
791 cx: &App,
792 ) -> HighlightedText {
793 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
794 return HighlightedText::default();
795 };
796
797 let mut highlighted_text = HighlightedTextBuilder::default();
798
799 let visible_range_in_preview_snapshot =
800 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
801 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
802
803 let insertion_highlight_style = HighlightStyle {
804 background_color: Some(cx.theme().status().created_background),
805 ..Default::default()
806 };
807 let deletion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().deleted_background),
809 ..Default::default()
810 };
811 let syntax_theme = cx.theme().syntax();
812
813 for (range, edit_text) in edits {
814 let edit_new_end_in_preview_snapshot = range
815 .end
816 .bias_right(&self.old_snapshot)
817 .to_offset(&self.applied_edits_snapshot);
818 let edit_start_in_preview_snapshot =
819 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
820
821 let unchanged_range_in_preview_snapshot =
822 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
823 if !unchanged_range_in_preview_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 unchanged_range_in_preview_snapshot,
826 &self.applied_edits_snapshot,
827 &self.syntax_snapshot,
828 None,
829 syntax_theme,
830 );
831 }
832
833 let range_in_current_snapshot = range.to_offset(current_snapshot);
834 if include_deletions && !range_in_current_snapshot.is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
839 Some(deletion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 if !edit_text.as_ref().is_empty() {
845 highlighted_text.add_text_from_buffer_range(
846 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
847 &self.applied_edits_snapshot,
848 &self.syntax_snapshot,
849 Some(insertion_highlight_style),
850 syntax_theme,
851 );
852 }
853
854 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
855 }
856
857 highlighted_text.add_text_from_buffer_range(
858 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
859 &self.applied_edits_snapshot,
860 &self.syntax_snapshot,
861 None,
862 syntax_theme,
863 );
864
865 highlighted_text.build()
866 }
867
868 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
869 cx.new(|cx| {
870 let mut buffer = Buffer::local_normalized(
871 self.applied_edits_snapshot.as_rope().clone(),
872 self.applied_edits_snapshot.line_ending(),
873 cx,
874 );
875 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
876 buffer
877 })
878 }
879
880 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
881 let (first, _) = edits.first()?;
882 let (last, _) = edits.last()?;
883
884 let start = first
885 .start
886 .bias_left(&self.old_snapshot)
887 .to_point(&self.applied_edits_snapshot);
888 let end = last
889 .end
890 .bias_right(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892
893 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
894 let range = Point::new(start.row, 0)
895 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
896
897 Some(range)
898 }
899}
900
901#[derive(Clone, Debug, PartialEq, Eq)]
902pub struct BracketMatch<T> {
903 pub open_range: Range<T>,
904 pub close_range: Range<T>,
905 pub newline_only: bool,
906 pub syntax_layer_depth: usize,
907 pub color_index: Option<usize>,
908}
909
910impl<T> BracketMatch<T> {
911 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
912 (self.open_range, self.close_range)
913 }
914}
915
916impl Buffer {
917 /// Create a new buffer with the given base text.
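    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming a gpui app
    /// context named `cx`:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```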
918 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
919 Self::build(
920 TextBuffer::new(
921 ReplicaId::LOCAL,
922 cx.entity_id().as_non_zero_u64().into(),
923 base_text.into(),
924 ),
925 None,
926 Capability::ReadWrite,
927 )
928 }
929
930 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
931 pub fn local_normalized(
932 base_text_normalized: Rope,
933 line_ending: LineEnding,
934 cx: &Context<Self>,
935 ) -> Self {
936 Self::build(
937 TextBuffer::new_normalized(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 line_ending,
941 base_text_normalized,
942 ),
943 None,
944 Capability::ReadWrite,
945 )
946 }
947
948 /// Create a new buffer that is a replica of a remote buffer.
949 pub fn remote(
950 remote_id: BufferId,
951 replica_id: ReplicaId,
952 capability: Capability,
953 base_text: impl Into<String>,
954 ) -> Self {
955 Self::build(
956 TextBuffer::new(replica_id, remote_id, base_text.into()),
957 None,
958 capability,
959 )
960 }
961
962 /// Create a new buffer that is a replica of a remote buffer, populating its
963 /// state from the given protobuf message.
964 pub fn from_proto(
965 replica_id: ReplicaId,
966 capability: Capability,
967 message: proto::BufferState,
968 file: Option<Arc<dyn File>>,
969 ) -> Result<Self> {
970 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
971 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
972 let mut this = Self::build(buffer, file, capability);
973 this.text.set_line_ending(proto::deserialize_line_ending(
974 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
975 ));
976 this.saved_version = proto::deserialize_version(&message.saved_version);
977 this.saved_mtime = message.saved_mtime.map(|time| time.into());
978 Ok(this)
979 }
980
981 /// Serialize the buffer's state to a protobuf message.
982 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
983 proto::BufferState {
984 id: self.remote_id().into(),
985 file: self.file.as_ref().map(|f| f.to_proto(cx)),
986 base_text: self.base_text().to_string(),
987 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
988 saved_version: proto::serialize_version(&self.saved_version),
989 saved_mtime: self.saved_mtime.map(|time| time.into()),
990 }
991 }
992
    /// Serializes all of the changes to the buffer since the given version as protobuf operations.
994 pub fn serialize_ops(
995 &self,
996 since: Option<clock::Global>,
997 cx: &App,
998 ) -> Task<Vec<proto::Operation>> {
999 let mut operations = Vec::new();
1000 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1001
1002 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1003 proto::serialize_operation(&Operation::UpdateSelections {
1004 selections: set.selections.clone(),
1005 lamport_timestamp: set.lamport_timestamp,
1006 line_mode: set.line_mode,
1007 cursor_shape: set.cursor_shape,
1008 })
1009 }));
1010
1011 for (server_id, diagnostics) in &self.diagnostics {
1012 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1013 lamport_timestamp: self.diagnostics_timestamp,
1014 server_id: *server_id,
1015 diagnostics: diagnostics.iter().cloned().collect(),
1016 }));
1017 }
1018
1019 for (server_id, completions) in &self.completion_triggers_per_language_server {
1020 operations.push(proto::serialize_operation(
1021 &Operation::UpdateCompletionTriggers {
1022 triggers: completions.iter().cloned().collect(),
1023 lamport_timestamp: self.completion_triggers_timestamp,
1024 server_id: *server_id,
1025 },
1026 ));
1027 }
1028
1029 let text_operations = self.text.operations().clone();
1030 cx.background_spawn(async move {
1031 let since = since.unwrap_or_default();
1032 operations.extend(
1033 text_operations
1034 .iter()
1035 .filter(|(_, op)| !since.observed(op.timestamp()))
1036 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1037 );
1038 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1039 operations
1040 })
1041 }
1042
1043 /// Assign a language to the buffer, returning the buffer.
1044 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1045 self.set_language_async(Some(language), cx);
1046 self
1047 }
1048
1049 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1050 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1051 self.set_language(Some(language), cx);
1052 self
1053 }
1054
1055 /// Returns the [`Capability`] of this buffer.
1056 pub fn capability(&self) -> Capability {
1057 self.capability
1058 }
1059
1060 /// Whether this buffer can only be read.
1061 pub fn read_only(&self) -> bool {
1062 self.capability == Capability::ReadOnly
1063 }
1064
1065 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1066 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1067 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1068 let snapshot = buffer.snapshot();
1069 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1070 let tree_sitter_data = TreeSitterData::new(snapshot);
1071 Self {
1072 saved_mtime,
1073 tree_sitter_data: Arc::new(tree_sitter_data),
1074 saved_version: buffer.version(),
1075 preview_version: buffer.version(),
1076 reload_task: None,
1077 transaction_depth: 0,
1078 was_dirty_before_starting_transaction: None,
1079 has_unsaved_edits: Cell::new((buffer.version(), false)),
1080 text: buffer,
1081 branch_state: None,
1082 file,
1083 capability,
1084 syntax_map,
1085 reparse: None,
1086 non_text_state_update_count: 0,
1087 sync_parse_timeout: Duration::from_millis(1),
1088 parse_status: watch::channel(ParseStatus::Idle),
1089 autoindent_requests: Default::default(),
1090 wait_for_autoindent_txs: Default::default(),
1091 pending_autoindent: Default::default(),
1092 language: None,
1093 remote_selections: Default::default(),
1094 diagnostics: Default::default(),
1095 diagnostics_timestamp: Lamport::MIN,
1096 completion_triggers: Default::default(),
1097 completion_triggers_per_language_server: Default::default(),
1098 completion_triggers_timestamp: Lamport::MIN,
1099 deferred_ops: OperationQueue::new(),
1100 has_conflict: false,
1101 change_bits: Default::default(),
1102 _subscriptions: Vec::new(),
1103 }
1104 }
1105
1106 pub fn build_snapshot(
1107 text: Rope,
1108 language: Option<Arc<Language>>,
1109 language_registry: Option<Arc<LanguageRegistry>>,
1110 cx: &mut App,
1111 ) -> impl Future<Output = BufferSnapshot> + use<> {
1112 let entity_id = cx.reserve_entity::<Self>().entity_id();
1113 let buffer_id = entity_id.as_non_zero_u64().into();
1114 async move {
1115 let text =
1116 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1117 .snapshot();
1118 let mut syntax = SyntaxMap::new(&text).snapshot();
1119 if let Some(language) = language.clone() {
1120 let language_registry = language_registry.clone();
1121 syntax.reparse(&text, language_registry, language);
1122 }
1123 let tree_sitter_data = TreeSitterData::new(text.clone());
1124 BufferSnapshot {
1125 text,
1126 syntax,
1127 file: None,
1128 diagnostics: Default::default(),
1129 remote_selections: Default::default(),
1130 tree_sitter_data: Arc::new(tree_sitter_data),
1131 language,
1132 non_text_state_update_count: 0,
1133 }
1134 }
1135 }
1136
1137 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1138 let entity_id = cx.reserve_entity::<Self>().entity_id();
1139 let buffer_id = entity_id.as_non_zero_u64().into();
1140 let text = TextBuffer::new_normalized(
1141 ReplicaId::LOCAL,
1142 buffer_id,
1143 Default::default(),
1144 Rope::new(),
1145 )
1146 .snapshot();
1147 let syntax = SyntaxMap::new(&text).snapshot();
1148 let tree_sitter_data = TreeSitterData::new(text.clone());
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(tree_sitter_data),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language: None,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 #[cfg(any(test, feature = "test-support"))]
1162 pub fn build_snapshot_sync(
1163 text: Rope,
1164 language: Option<Arc<Language>>,
1165 language_registry: Option<Arc<LanguageRegistry>>,
1166 cx: &mut App,
1167 ) -> BufferSnapshot {
1168 let entity_id = cx.reserve_entity::<Self>().entity_id();
1169 let buffer_id = entity_id.as_non_zero_u64().into();
1170 let text =
1171 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1172 .snapshot();
1173 let mut syntax = SyntaxMap::new(&text).snapshot();
1174 if let Some(language) = language.clone() {
1175 syntax.reparse(&text, language_registry, language);
1176 }
1177 let tree_sitter_data = TreeSitterData::new(text.clone());
1178 BufferSnapshot {
1179 text,
1180 syntax,
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 file: None,
1183 diagnostics: Default::default(),
1184 remote_selections: Default::default(),
1185 language,
1186 non_text_state_update_count: 0,
1187 }
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's current state. This is computationally
1191 /// cheap, and allows reading from the buffer on a background thread.
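    ///
    /// An illustrative sketch (not compiled as a doctest) of handing a snapshot to
    /// a background task; `buffer` is an assumed `Entity<Buffer>`:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Reads go through the snapshot, not the live buffer.
    ///     let _last_row = snapshot.max_point().row;
    /// })
    /// .detach();
    /// ```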
1192 pub fn snapshot(&self) -> BufferSnapshot {
1193 let text = self.text.snapshot();
1194 let mut syntax_map = self.syntax_map.lock();
1195 syntax_map.interpolate(&text);
1196 let syntax = syntax_map.snapshot();
1197
1198 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1199 Arc::new(TreeSitterData::new(text.clone()))
1200 } else {
1201 self.tree_sitter_data.clone()
1202 };
1203
1204 BufferSnapshot {
1205 text,
1206 syntax,
1207 tree_sitter_data,
1208 file: self.file.clone(),
1209 remote_selections: self.remote_selections.clone(),
1210 diagnostics: self.diagnostics.clone(),
1211 language: self.language.clone(),
1212 non_text_state_update_count: self.non_text_state_update_count,
1213 }
1214 }
1215
1216 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1217 let this = cx.entity();
1218 cx.new(|cx| {
1219 let mut branch = Self {
1220 branch_state: Some(BufferBranchState {
1221 base_buffer: this.clone(),
1222 merged_operations: Default::default(),
1223 }),
1224 language: self.language.clone(),
1225 has_conflict: self.has_conflict,
1226 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1227 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1228 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1229 };
1230 if let Some(language_registry) = self.language_registry() {
1231 branch.set_language_registry(language_registry);
1232 }
1233
1234 // Reparse the branch buffer so that we get syntax highlighting immediately.
1235 branch.reparse(cx, true);
1236
1237 branch
1238 })
1239 }
1240
1241 pub fn preview_edits(
1242 &self,
1243 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1244 cx: &App,
1245 ) -> Task<EditPreview> {
1246 let registry = self.language_registry();
1247 let language = self.language().cloned();
1248 let old_snapshot = self.text.snapshot();
1249 let mut branch_buffer = self.text.branch();
1250 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1251 cx.background_spawn(async move {
1252 if !edits.is_empty() {
1253 if let Some(language) = language.clone() {
1254 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1255 }
1256
1257 branch_buffer.edit(edits.iter().cloned());
1258 let snapshot = branch_buffer.snapshot();
1259 syntax_snapshot.interpolate(&snapshot);
1260
1261 if let Some(language) = language {
1262 syntax_snapshot.reparse(&snapshot, registry, language);
1263 }
1264 }
1265 EditPreview {
1266 old_snapshot,
1267 applied_edits_snapshot: branch_buffer.snapshot(),
1268 syntax_snapshot,
1269 }
1270 })
1271 }
1272
1273 /// Applies all of the changes in this buffer that intersect any of the
1274 /// given `ranges` to its base buffer.
1275 ///
1276 /// If `ranges` is empty, then all changes will be applied. This buffer must
1277 /// be a branch buffer to call this method.
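    ///
    /// An illustrative sketch (not compiled as a doctest); `branch` is an assumed
    /// branch buffer created via [`Buffer::branch`]:
    ///
    /// ```ignore
    /// // An empty `ranges` vector applies every change back to the base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```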
1278 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1279 let Some(base_buffer) = self.base_buffer() else {
1280 debug_panic!("not a branch buffer");
1281 return;
1282 };
1283
1284 let mut ranges = if ranges.is_empty() {
1285 &[0..usize::MAX]
1286 } else {
1287 ranges.as_slice()
1288 }
1289 .iter()
1290 .peekable();
1291
1292 let mut edits = Vec::new();
1293 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1294 let mut is_included = false;
1295 while let Some(range) = ranges.peek() {
1296 if range.end < edit.new.start {
1297 ranges.next().unwrap();
1298 } else {
1299 if range.start <= edit.new.end {
1300 is_included = true;
1301 }
1302 break;
1303 }
1304 }
1305
1306 if is_included {
1307 edits.push((
1308 edit.old.clone(),
1309 self.text_for_range(edit.new.clone()).collect::<String>(),
1310 ));
1311 }
1312 }
1313
1314 let operation = base_buffer.update(cx, |base_buffer, cx| {
1315 // cx.emit(BufferEvent::DiffBaseChanged);
1316 base_buffer.edit(edits, None, cx)
1317 });
1318
1319 if let Some(operation) = operation
1320 && let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 {
1324 merged_operations.push(operation);
1325 }
1326 }
1327
1328 fn on_base_buffer_event(
1329 &mut self,
1330 _: Entity<Buffer>,
1331 event: &BufferEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 let BufferEvent::Operation { operation, .. } = event else {
1335 return;
1336 };
1337 let Some(BufferBranchState {
1338 merged_operations, ..
1339 }) = &mut self.branch_state
1340 else {
1341 return;
1342 };
1343
1344 let mut operation_to_undo = None;
1345 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1346 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1347 {
1348 merged_operations.remove(ix);
1349 operation_to_undo = Some(operation.timestamp);
1350 }
1351
1352 self.apply_ops([operation.clone()], cx);
1353
1354 if let Some(timestamp) = operation_to_undo {
1355 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1356 self.undo_operations(counts, cx);
1357 }
1358 }
1359
1360 #[cfg(test)]
1361 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1362 &self.text
1363 }
1364
1365 /// Retrieve a snapshot of the buffer's raw text, without any
1366 /// language-related state like the syntax tree or diagnostics.
1367 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1368 self.text.snapshot()
1369 }
1370
1371 /// The file associated with the buffer, if any.
1372 pub fn file(&self) -> Option<&Arc<dyn File>> {
1373 self.file.as_ref()
1374 }
1375
1376 /// The version of the buffer that was last saved or reloaded from disk.
1377 pub fn saved_version(&self) -> &clock::Global {
1378 &self.saved_version
1379 }
1380
1381 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1382 pub fn saved_mtime(&self) -> Option<MTime> {
1383 self.saved_mtime
1384 }
1385
1386 /// Assign a language to the buffer.
1387 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1388 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1389 }
1390
1391 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1392 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1393 self.set_language_(language, true, cx);
1394 }
1395
1396 fn set_language_(
1397 &mut self,
1398 language: Option<Arc<Language>>,
1399 may_block: bool,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.non_text_state_update_count += 1;
1403 self.syntax_map.lock().clear(&self.text);
1404 let old_language = std::mem::replace(&mut self.language, language);
1405 self.was_changed();
1406 self.reparse(cx, may_block);
1407 let has_fresh_language =
1408 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1409 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1410 }
1411
1412 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1413 /// other languages if parts of the buffer are written in different languages.
1414 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1415 self.syntax_map
1416 .lock()
1417 .set_language_registry(language_registry);
1418 }
1419
1420 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1421 self.syntax_map.lock().language_registry()
1422 }
1423
1424 /// Assign the line ending type to the buffer.
1425 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1426 self.text.set_line_ending(line_ending);
1427
1428 let lamport_timestamp = self.text.lamport_clock.tick();
1429 self.send_operation(
1430 Operation::UpdateLineEnding {
1431 line_ending,
1432 lamport_timestamp,
1433 },
1434 true,
1435 cx,
1436 );
1437 }
1438
1439 /// Assign the buffer a new [`Capability`].
1440 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1441 if self.capability != capability {
1442 self.capability = capability;
1443 cx.emit(BufferEvent::CapabilityChanged)
1444 }
1445 }
1446
1447 /// This method is called to signal that the buffer has been saved.
1448 pub fn did_save(
1449 &mut self,
1450 version: clock::Global,
1451 mtime: Option<MTime>,
1452 cx: &mut Context<Self>,
1453 ) {
1454 self.saved_version = version.clone();
1455 self.has_unsaved_edits.set((version, false));
1456 self.has_conflict = false;
1457 self.saved_mtime = mtime;
1458 self.was_changed();
1459 cx.emit(BufferEvent::Saved);
1460 cx.notify();
1461 }
1462
1463 /// Reloads the contents of the buffer from disk.
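    ///
    /// An illustrative sketch (not compiled as a doctest); `buffer` and `cx` are
    /// assumed bindings and error handling is omitted:
    ///
    /// ```ignore
    /// let receiver = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// // Later, in an async context:
    /// let _transaction = receiver.await.ok().flatten();
    /// ```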
1464 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1465 let (tx, rx) = futures::channel::oneshot::channel();
1466 let prev_version = self.text.version();
1467 self.reload_task = Some(cx.spawn(async move |this, cx| {
1468 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1469 let file = this.file.as_ref()?.as_local()?;
1470
1471 Some((file.disk_state().mtime(), file.load(cx)))
1472 })?
1473 else {
1474 return Ok(());
1475 };
1476
1477 let new_text = new_text.await?;
1478 let diff = this
1479 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1480 .await;
1481 this.update(cx, |this, cx| {
1482 if this.version() == diff.base_version {
1483 this.finalize_last_transaction();
1484 this.apply_diff(diff, cx);
1485 tx.send(this.finalize_last_transaction().cloned()).ok();
1486 this.has_conflict = false;
1487 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1488 } else {
1489 if !diff.edits.is_empty()
1490 || this
1491 .edits_since::<usize>(&diff.base_version)
1492 .next()
1493 .is_some()
1494 {
1495 this.has_conflict = true;
1496 }
1497
1498 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1499 }
1500
1501 this.reload_task.take();
1502 })
1503 }));
1504 rx
1505 }
1506
1507 /// This method is called to signal that the buffer has been reloaded.
1508 pub fn did_reload(
1509 &mut self,
1510 version: clock::Global,
1511 line_ending: LineEnding,
1512 mtime: Option<MTime>,
1513 cx: &mut Context<Self>,
1514 ) {
1515 self.saved_version = version;
1516 self.has_unsaved_edits
1517 .set((self.saved_version.clone(), false));
1518 self.text.set_line_ending(line_ending);
1519 self.saved_mtime = mtime;
1520 cx.emit(BufferEvent::Reloaded);
1521 cx.notify();
1522 }
1523
1524 /// Updates the [`File`] backing this buffer. This should be called when
1525 /// the file has changed or has been deleted.
1526 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1527 let was_dirty = self.is_dirty();
1528 let mut file_changed = false;
1529
1530 if let Some(old_file) = self.file.as_ref() {
1531 if new_file.path() != old_file.path() {
1532 file_changed = true;
1533 }
1534
1535 let old_state = old_file.disk_state();
1536 let new_state = new_file.disk_state();
1537 if old_state != new_state {
1538 file_changed = true;
1539 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1540 cx.emit(BufferEvent::ReloadNeeded)
1541 }
1542 }
1543 } else {
1544 file_changed = true;
1545 };
1546
1547 self.file = Some(new_file);
1548 if file_changed {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 if was_dirty != self.is_dirty() {
1552 cx.emit(BufferEvent::DirtyChanged);
1553 }
1554 cx.emit(BufferEvent::FileHandleChanged);
1555 cx.notify();
1556 }
1557 }
1558
1559 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1560 Some(self.branch_state.as_ref()?.base_buffer.clone())
1561 }
1562
1563 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1564 pub fn language(&self) -> Option<&Arc<Language>> {
1565 self.language.as_ref()
1566 }
1567
1568 /// Returns the [`Language`] at the given location.
1569 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1570 let offset = position.to_offset(self);
1571 let mut is_first = true;
1572 let start_anchor = self.anchor_before(offset);
1573 let end_anchor = self.anchor_after(offset);
1574 self.syntax_map
1575 .lock()
1576 .layers_for_range(offset..offset, &self.text, false)
1577 .filter(|layer| {
1578 if is_first {
1579 is_first = false;
1580 return true;
1581 }
1582
1583 layer
1584 .included_sub_ranges
1585 .map(|sub_ranges| {
1586 sub_ranges.iter().any(|sub_range| {
1587 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1588 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1589 !is_before_start && !is_after_end
1590 })
1591 })
1592 .unwrap_or(true)
1593 })
1594 .last()
1595 .map(|info| info.language.clone())
1596 .or_else(|| self.language.clone())
1597 }
1598
1599 /// Returns each [`Language`] for the active syntax layers at the given location.
1600 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1601 let offset = position.to_offset(self);
1602 let mut languages: Vec<Arc<Language>> = self
1603 .syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .map(|info| info.language.clone())
1607 .collect();
1608
1609 if languages.is_empty()
1610 && let Some(buffer_language) = self.language()
1611 {
1612 languages.push(buffer_language.clone());
1613 }
1614
1615 languages
1616 }
1617
1618 /// An integer version number that accounts for all updates besides
1619 /// the buffer's text itself (which is versioned via a version vector).
1620 pub fn non_text_state_update_count(&self) -> usize {
1621 self.non_text_state_update_count
1622 }
1623
1624 /// Whether the buffer is being parsed in the background.
1625 #[cfg(any(test, feature = "test-support"))]
1626 pub fn is_parsing(&self) -> bool {
1627 self.reparse.is_some()
1628 }
1629
1630 /// Indicates whether the buffer contains any regions that may be
1631 /// written in a language that hasn't been loaded yet.
1632 pub fn contains_unknown_injections(&self) -> bool {
1633 self.syntax_map.lock().contains_unknown_injections()
1634 }
1635
1636 #[cfg(any(test, feature = "test-support"))]
1637 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1638 self.sync_parse_timeout = timeout;
1639 }
1640
1641 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1642 match Arc::get_mut(&mut self.tree_sitter_data) {
1643 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1644 None => {
1645 let tree_sitter_data = TreeSitterData::new(snapshot);
1646 self.tree_sitter_data = Arc::new(tree_sitter_data)
1647 }
1648 }
1649 }
1650
1651 /// Called after an edit to synchronize the buffer's main parse tree with
1652 /// the buffer's new underlying state.
1653 ///
1654 /// Locks the syntax map and interpolates the edits since the last reparse
1655 /// into the foreground syntax tree.
1656 ///
1657 /// Then takes a stable snapshot of the syntax map before unlocking it.
1658 /// The snapshot with the interpolated edits is sent to a background thread,
1659 /// where we ask Tree-sitter to perform an incremental parse.
1660 ///
    /// Meanwhile, if `may_block` is true, the foreground blocks the main thread
    /// for up to 1ms (`sync_parse_timeout`) waiting for the parse to complete,
    /// and proceeds synchronously if it finishes in time.
    ///
    /// If we time out waiting on the parse, we return with the interpolated tree
    /// still in the foreground and spawn a second task that waits for the parse
    /// to complete. When the background parse finishes, it calls back into the
    /// main thread and assigns the new parse state.
1669 ///
1670 /// If the buffer or grammar changed since the start of the background parse,
1671 /// initiate an additional reparse recursively. To avoid concurrent parses
1672 /// for the same buffer, we only initiate a new parse if we are not already
1673 /// parsing in the background.
1674 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1675 if self.text.version() != *self.tree_sitter_data.version() {
1676 self.invalidate_tree_sitter_data(self.text.snapshot());
1677 }
1678 if self.reparse.is_some() {
1679 return;
1680 }
1681 let language = if let Some(language) = self.language.clone() {
1682 language
1683 } else {
1684 return;
1685 };
1686
1687 let text = self.text_snapshot();
1688 let parsed_version = self.version();
1689
1690 let mut syntax_map = self.syntax_map.lock();
1691 syntax_map.interpolate(&text);
1692 let language_registry = syntax_map.language_registry();
1693 let mut syntax_snapshot = syntax_map.snapshot();
1694 drop(syntax_map);
1695
1696 let parse_task = cx.background_spawn({
1697 let language = language.clone();
1698 let language_registry = language_registry.clone();
1699 async move {
1700 syntax_snapshot.reparse(&text, language_registry, language);
1701 syntax_snapshot
1702 }
1703 });
1704
1705 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1706 if may_block {
1707 match cx
1708 .background_executor()
1709 .block_with_timeout(self.sync_parse_timeout, parse_task)
1710 {
1711 Ok(new_syntax_snapshot) => {
1712 self.did_finish_parsing(new_syntax_snapshot, cx);
1713 self.reparse = None;
1714 }
1715 Err(parse_task) => {
1716 self.reparse = Some(cx.spawn(async move |this, cx| {
1717 let new_syntax_map = cx.background_spawn(parse_task).await;
1718 this.update(cx, move |this, cx| {
1719 let grammar_changed = || {
1720 this.language.as_ref().is_none_or(|current_language| {
1721 !Arc::ptr_eq(&language, current_language)
1722 })
1723 };
1724 let language_registry_changed = || {
1725 new_syntax_map.contains_unknown_injections()
1726 && language_registry.is_some_and(|registry| {
1727 registry.version()
1728 != new_syntax_map.language_registry_version()
1729 })
1730 };
1731 let parse_again = this.version.changed_since(&parsed_version)
1732 || language_registry_changed()
1733 || grammar_changed();
1734 this.did_finish_parsing(new_syntax_map, cx);
1735 this.reparse = None;
1736 if parse_again {
1737 this.reparse(cx, false);
1738 }
1739 })
1740 .ok();
1741 }));
1742 }
1743 }
1744 } else {
1745 self.reparse = Some(cx.spawn(async move |this, cx| {
1746 let new_syntax_map = cx.background_spawn(parse_task).await;
1747 this.update(cx, move |this, cx| {
1748 let grammar_changed = || {
1749 this.language.as_ref().is_none_or(|current_language| {
1750 !Arc::ptr_eq(&language, current_language)
1751 })
1752 };
1753 let language_registry_changed = || {
1754 new_syntax_map.contains_unknown_injections()
1755 && language_registry.is_some_and(|registry| {
1756 registry.version() != new_syntax_map.language_registry_version()
1757 })
1758 };
1759 let parse_again = this.version.changed_since(&parsed_version)
1760 || language_registry_changed()
1761 || grammar_changed();
1762 this.did_finish_parsing(new_syntax_map, cx);
1763 this.reparse = None;
1764 if parse_again {
1765 this.reparse(cx, false);
1766 }
1767 })
1768 .ok();
1769 }));
1770 }
1771 }
1772
1773 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1774 self.was_changed();
1775 self.non_text_state_update_count += 1;
1776 self.syntax_map.lock().did_parse(syntax_snapshot);
1777 self.request_autoindent(cx);
1778 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1779 if self.text.version() != *self.tree_sitter_data.version() {
1780 self.invalidate_tree_sitter_data(self.text.snapshot());
1781 }
1782 cx.emit(BufferEvent::Reparsed);
1783 cx.notify();
1784 }
1785
1786 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1787 self.parse_status.1.clone()
1788 }
1789
    /// Waits until the buffer is no longer parsing.
1791 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1792 let mut parse_status = self.parse_status();
1793 async move {
1794 while *parse_status.borrow() != ParseStatus::Idle {
1795 if parse_status.changed().await.is_err() {
1796 break;
1797 }
1798 }
1799 }
1800 }
1801
1802 /// Assign to the buffer a set of diagnostics created by a given language server.
1803 pub fn update_diagnostics(
1804 &mut self,
1805 server_id: LanguageServerId,
1806 diagnostics: DiagnosticSet,
1807 cx: &mut Context<Self>,
1808 ) {
1809 let lamport_timestamp = self.text.lamport_clock.tick();
1810 let op = Operation::UpdateDiagnostics {
1811 server_id,
1812 diagnostics: diagnostics.iter().cloned().collect(),
1813 lamport_timestamp,
1814 };
1815
1816 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1817 self.send_operation(op, true, cx);
1818 }
1819
1820 pub fn buffer_diagnostics(
1821 &self,
1822 for_server: Option<LanguageServerId>,
1823 ) -> Vec<&DiagnosticEntry<Anchor>> {
1824 match for_server {
1825 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1826 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1827 Err(_) => Vec::new(),
1828 },
1829 None => self
1830 .diagnostics
1831 .iter()
1832 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1833 .collect(),
1834 }
1835 }
1836
1837 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1838 if let Some(indent_sizes) = self.compute_autoindents() {
1839 let indent_sizes = cx.background_spawn(indent_sizes);
1840 match cx
1841 .background_executor()
1842 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1843 {
1844 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1845 Err(indent_sizes) => {
1846 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1847 let indent_sizes = indent_sizes.await;
1848 this.update(cx, |this, cx| {
1849 this.apply_autoindents(indent_sizes, cx);
1850 })
1851 .ok();
1852 }));
1853 }
1854 }
1855 } else {
1856 self.autoindent_requests.clear();
1857 for tx in self.wait_for_autoindent_txs.drain(..) {
1858 tx.send(()).ok();
1859 }
1860 }
1861 }
1862
1863 fn compute_autoindents(
1864 &self,
1865 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1866 let max_rows_between_yields = 100;
1867 let snapshot = self.snapshot();
1868 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1869 return None;
1870 }
1871
1872 let autoindent_requests = self.autoindent_requests.clone();
1873 Some(async move {
1874 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1875 for request in autoindent_requests {
1876 // Resolve each edited range to its row in the current buffer and in the
1877 // buffer before this batch of edits.
1878 let mut row_ranges = Vec::new();
1879 let mut old_to_new_rows = BTreeMap::new();
1880 let mut language_indent_sizes_by_new_row = Vec::new();
1881 for entry in &request.entries {
1882 let position = entry.range.start;
1883 let new_row = position.to_point(&snapshot).row;
1884 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1885 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1886
1887 if !entry.first_line_is_new {
1888 let old_row = position.to_point(&request.before_edit).row;
1889 old_to_new_rows.insert(old_row, new_row);
1890 }
1891 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1892 }
1893
1894 // Build a map containing the suggested indentation for each of the edited lines
1895 // with respect to the state of the buffer before these edits. This map is keyed
1896 // by the rows for these lines in the current state of the buffer.
1897 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1898 let old_edited_ranges =
1899 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1900 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1901 let mut language_indent_size = IndentSize::default();
1902 for old_edited_range in old_edited_ranges {
1903 let suggestions = request
1904 .before_edit
1905 .suggest_autoindents(old_edited_range.clone())
1906 .into_iter()
1907 .flatten();
1908 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1909 if let Some(suggestion) = suggestion {
1910 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1911
1912 // Find the indent size based on the language for this row.
1913 while let Some((row, size)) = language_indent_sizes.peek() {
1914 if *row > new_row {
1915 break;
1916 }
1917 language_indent_size = *size;
1918 language_indent_sizes.next();
1919 }
1920
1921 let suggested_indent = old_to_new_rows
1922 .get(&suggestion.basis_row)
1923 .and_then(|from_row| {
1924 Some(old_suggestions.get(from_row).copied()?.0)
1925 })
1926 .unwrap_or_else(|| {
1927 request
1928 .before_edit
1929 .indent_size_for_line(suggestion.basis_row)
1930 })
1931 .with_delta(suggestion.delta, language_indent_size);
1932 old_suggestions
1933 .insert(new_row, (suggested_indent, suggestion.within_error));
1934 }
1935 }
1936 yield_now().await;
1937 }
1938
1939 // Compute new suggestions for each line, but only include them in the result
1940 // if they differ from the old suggestion for that line.
1941 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1942 let mut language_indent_size = IndentSize::default();
1943 for (row_range, original_indent_column) in row_ranges {
1944 let new_edited_row_range = if request.is_block_mode {
1945 row_range.start..row_range.start + 1
1946 } else {
1947 row_range.clone()
1948 };
1949
1950 let suggestions = snapshot
1951 .suggest_autoindents(new_edited_row_range.clone())
1952 .into_iter()
1953 .flatten();
1954 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1955 if let Some(suggestion) = suggestion {
1956 // Find the indent size based on the language for this row.
1957 while let Some((row, size)) = language_indent_sizes.peek() {
1958 if *row > new_row {
1959 break;
1960 }
1961 language_indent_size = *size;
1962 language_indent_sizes.next();
1963 }
1964
1965 let suggested_indent = indent_sizes
1966 .get(&suggestion.basis_row)
1967 .copied()
1968 .map(|e| e.0)
1969 .unwrap_or_else(|| {
1970 snapshot.indent_size_for_line(suggestion.basis_row)
1971 })
1972 .with_delta(suggestion.delta, language_indent_size);
1973
1974 if old_suggestions.get(&new_row).is_none_or(
1975 |(old_indentation, was_within_error)| {
1976 suggested_indent != *old_indentation
1977 && (!suggestion.within_error || *was_within_error)
1978 },
1979 ) {
1980 indent_sizes.insert(
1981 new_row,
1982 (suggested_indent, request.ignore_empty_lines),
1983 );
1984 }
1985 }
1986 }
1987
1988 if let (true, Some(original_indent_column)) =
1989 (request.is_block_mode, original_indent_column)
1990 {
1991 let new_indent =
1992 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1993 *indent
1994 } else {
1995 snapshot.indent_size_for_line(row_range.start)
1996 };
1997 let delta = new_indent.len as i64 - original_indent_column as i64;
1998 if delta != 0 {
1999 for row in row_range.skip(1) {
2000 indent_sizes.entry(row).or_insert_with(|| {
2001 let mut size = snapshot.indent_size_for_line(row);
2002 if size.kind == new_indent.kind {
2003 match delta.cmp(&0) {
2004 Ordering::Greater => size.len += delta as u32,
2005 Ordering::Less => {
2006 size.len = size.len.saturating_sub(-delta as u32)
2007 }
2008 Ordering::Equal => {}
2009 }
2010 }
2011 (size, request.ignore_empty_lines)
2012 });
2013 }
2014 }
2015 }
2016
2017 yield_now().await;
2018 }
2019 }
2020
2021 indent_sizes
2022 .into_iter()
2023 .filter_map(|(row, (indent, ignore_empty_lines))| {
2024 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2025 None
2026 } else {
2027 Some((row, indent))
2028 }
2029 })
2030 .collect()
2031 })
2032 }
2033
2034 fn apply_autoindents(
2035 &mut self,
2036 indent_sizes: BTreeMap<u32, IndentSize>,
2037 cx: &mut Context<Self>,
2038 ) {
2039 self.autoindent_requests.clear();
2040 for tx in self.wait_for_autoindent_txs.drain(..) {
2041 tx.send(()).ok();
2042 }
2043
2044 let edits: Vec<_> = indent_sizes
2045 .into_iter()
2046 .filter_map(|(row, indent_size)| {
2047 let current_size = indent_size_for_line(self, row);
2048 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2049 })
2050 .collect();
2051
2052 let preserve_preview = self.preserve_preview();
2053 self.edit(edits, None, cx);
2054 if preserve_preview {
2055 self.refresh_preview();
2056 }
2057 }
2058
2059 /// Create a minimal edit that will cause the given row to be indented
2060 /// with the given size. After applying this edit, the length of the line
2061 /// will always be at least `new_size.len`.
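    ///
    /// An illustrative sketch (not a doctest) of the expected output, assuming an
    /// existing two-space indent that should grow to four spaces:
    ///
    /// ```ignore
    /// // Growing the indent inserts the missing columns at the start of the row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     5,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(5, 0)..Point::new(5, 0), "  ".to_string()))
    /// );
    /// ```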
2062 pub fn edit_for_indent_size_adjustment(
2063 row: u32,
2064 current_size: IndentSize,
2065 new_size: IndentSize,
2066 ) -> Option<(Range<Point>, String)> {
2067 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2069 Ordering::Greater => {
2070 let point = Point::new(row, 0);
2071 Some((
2072 point..point,
2073 iter::repeat(new_size.char())
2074 .take((new_size.len - current_size.len) as usize)
2075 .collect::<String>(),
2076 ))
2077 }
2078
2079 Ordering::Less => Some((
2080 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2081 String::new(),
2082 )),
2083
2084 Ordering::Equal => None,
2085 }
2086 } else {
2087 Some((
2088 Point::new(row, 0)..Point::new(row, current_size.len),
2089 iter::repeat(new_size.char())
2090 .take(new_size.len as usize)
2091 .collect::<String>(),
2092 ))
2093 }
2094 }
2095
2096 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2097 /// and the given new text.
2098 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2099 let old_text = self.as_rope().clone();
2100 let base_version = self.version();
2101 cx.background_executor()
2102 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2103 let old_text = old_text.to_string();
2104 let line_ending = LineEnding::detect(&new_text);
2105 LineEnding::normalize(&mut new_text);
2106 let edits = text_diff(&old_text, &new_text);
2107 Diff {
2108 base_version,
2109 line_ending,
2110 edits,
2111 }
2112 })
2113 }
2114
2115 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2117 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2118 let old_text = self.as_rope().clone();
2119 let line_ending = self.line_ending();
2120 let base_version = self.version();
2121 cx.background_spawn(async move {
2122 let ranges = trailing_whitespace_ranges(&old_text);
2123 let empty = Arc::<str>::from("");
2124 Diff {
2125 base_version,
2126 line_ending,
2127 edits: ranges
2128 .into_iter()
2129 .map(|range| (range, empty.clone()))
2130 .collect(),
2131 }
2132 })
2133 }
2134
2135 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
2137 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2138 let len = self.len();
2139 if len == 0 {
2140 return;
2141 }
2142 let mut offset = len;
2143 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2144 let non_whitespace_len = chunk
2145 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2146 .len();
2147 offset -= chunk.len();
2148 offset += non_whitespace_len;
2149 if non_whitespace_len != 0 {
2150 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2151 return;
2152 }
2153 break;
2154 }
2155 }
2156 self.edit([(offset..len, "\n")], None, cx);
2157 }
2158
2159 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2160 /// calculated, then adjust the diff to account for those changes, and discard any
2161 /// parts of the diff that conflict with those changes.
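    ///
    /// A hedged sketch of how [`Buffer::diff`] and this method are meant to be
    /// combined (the surrounding entity and executor plumbing is assumed, not
    /// quoted verbatim from this crate):
    ///
    /// ```ignore
    /// // `buffer` is an `Entity<Buffer>`; compute the diff off the main thread,
    /// // then apply it back to the buffer once it resolves.
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// cx.spawn(async move |cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```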
2162 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2163 let snapshot = self.snapshot();
2164 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2165 let mut delta = 0;
2166 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2167 while let Some(edit_since) = edits_since.peek() {
2168 // If the edit occurs after a diff hunk, then it does not
2169 // affect that hunk.
2170 if edit_since.old.start > range.end {
2171 break;
2172 }
2173 // If the edit precedes the diff hunk, then adjust the hunk
2174 // to reflect the edit.
2175 else if edit_since.old.end < range.start {
2176 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2177 edits_since.next();
2178 }
2179 // If the edit intersects a diff hunk, then discard that hunk.
2180 else {
2181 return None;
2182 }
2183 }
2184
2185 let start = (range.start as i64 + delta) as usize;
2186 let end = (range.end as i64 + delta) as usize;
2187 Some((start..end, new_text))
2188 });
2189
2190 self.start_transaction();
2191 self.text.set_line_ending(diff.line_ending);
2192 self.edit(adjusted_edits, None, cx);
2193 self.end_transaction(cx)
2194 }
2195
2196 pub fn has_unsaved_edits(&self) -> bool {
2197 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2198
2199 if last_version == self.version {
2200 self.has_unsaved_edits
2201 .set((last_version, has_unsaved_edits));
2202 return has_unsaved_edits;
2203 }
2204
2205 let has_edits = self.has_edits_since(&self.saved_version);
2206 self.has_unsaved_edits
2207 .set((self.version.clone(), has_edits));
2208 has_edits
2209 }
2210
2211 /// Checks if the buffer has unsaved changes.
2212 pub fn is_dirty(&self) -> bool {
2213 if self.capability == Capability::ReadOnly {
2214 return false;
2215 }
2216 if self.has_conflict {
2217 return true;
2218 }
2219 match self.file.as_ref().map(|f| f.disk_state()) {
2220 Some(DiskState::New) | Some(DiskState::Deleted) => {
2221 !self.is_empty() && self.has_unsaved_edits()
2222 }
2223 _ => self.has_unsaved_edits(),
2224 }
2225 }
2226
2227 /// Marks the buffer as having a conflict regardless of current buffer state.
2228 pub fn set_conflict(&mut self) {
2229 self.has_conflict = true;
2230 }
2231
2232 /// Checks if the buffer and its file have both changed since the buffer
2233 /// was last saved or reloaded.
2234 pub fn has_conflict(&self) -> bool {
2235 if self.has_conflict {
2236 return true;
2237 }
2238 let Some(file) = self.file.as_ref() else {
2239 return false;
2240 };
2241 match file.disk_state() {
2242 DiskState::New => false,
2243 DiskState::Present { mtime } => match self.saved_mtime {
2244 Some(saved_mtime) => {
2245 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2246 }
2247 None => true,
2248 },
2249 DiskState::Deleted => false,
2250 }
2251 }
2252
2253 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2254 pub fn subscribe(&mut self) -> Subscription<usize> {
2255 self.text.subscribe()
2256 }
2257
2258 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2259 ///
2260 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
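    ///
    /// A minimal sketch of the intended usage (assumes `std::rc::Rc` and
    /// `std::cell::Cell` are in scope):
    ///
    /// ```ignore
    /// // Register a weak change bit, then poll it cheaply, e.g. once per frame.
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// if changed.take() {
    ///     // The buffer's text changed since the last check.
    /// }
    /// ```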
2262 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2263 if let Err(ix) = self
2264 .change_bits
2265 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2266 {
2267 self.change_bits.insert(ix, bit);
2268 }
2269 }
2270
2271 /// Set the change bit for all "listeners".
2272 fn was_changed(&mut self) {
2273 self.change_bits.retain(|change_bit| {
2274 change_bit
2275 .upgrade()
2276 .inspect(|bit| {
2277 _ = bit.replace(true);
2278 })
2279 .is_some()
2280 });
2281 }
2282
2283 /// Starts a transaction, if one is not already in-progress. When undoing or
2284 /// redoing edits, all of the edits performed within a transaction are undone
2285 /// or redone together.
2286 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2287 self.start_transaction_at(Instant::now())
2288 }
2289
2290 /// Starts a transaction, providing the current time. Subsequent transactions
2291 /// that occur within a short period of time will be grouped together. This
2292 /// is controlled by the buffer's undo grouping duration.
2293 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2294 self.transaction_depth += 1;
2295 if self.was_dirty_before_starting_transaction.is_none() {
2296 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2297 }
2298 self.text.start_transaction_at(now)
2299 }
2300
2301 /// Terminates the current transaction, if this is the outermost transaction.
2302 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2303 self.end_transaction_at(Instant::now(), cx)
2304 }
2305
2306 /// Terminates the current transaction, providing the current time. Subsequent transactions
2307 /// that occur within a short period of time will be grouped together. This
2308 /// is controlled by the buffer's undo grouping duration.
2309 pub fn end_transaction_at(
2310 &mut self,
2311 now: Instant,
2312 cx: &mut Context<Self>,
2313 ) -> Option<TransactionId> {
2314 assert!(self.transaction_depth > 0);
2315 self.transaction_depth -= 1;
2316 let was_dirty = if self.transaction_depth == 0 {
2317 self.was_dirty_before_starting_transaction.take().unwrap()
2318 } else {
2319 false
2320 };
2321 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2322 self.did_edit(&start_version, was_dirty, cx);
2323 Some(transaction_id)
2324 } else {
2325 None
2326 }
2327 }
2328
2329 /// Manually add a transaction to the buffer's undo history.
2330 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2331 self.text.push_transaction(transaction, now);
2332 }
2333
2334 /// Differs from `push_transaction` in that it does not clear the redo
2335 /// stack. Intended to be used to create a parent transaction to merge
2336 /// potential child transactions into.
2337 ///
2338 /// The caller is responsible for removing it from the undo history using
2339 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2340 /// are merged into this transaction, the caller is responsible for ensuring
2341 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2342 /// cleared is to create transactions with the usual `start_transaction` and
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into the
    /// transaction created by this method.
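    ///
    /// A hedged sketch of that pattern (`cx` is a `Context<Buffer>`):
    ///
    /// ```ignore
    /// // Create the parent, record a child transaction, then either merge it in
    /// // or discard the parent if no edits were made.
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```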
2345 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2346 self.text.push_empty_transaction(now)
2347 }
2348
2349 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2351 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2352 self.text.finalize_last_transaction()
2353 }
2354
2355 /// Manually group all changes since a given transaction.
2356 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2357 self.text.group_until_transaction(transaction_id);
2358 }
2359
    /// Manually remove a transaction from the buffer's undo history.
2361 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2362 self.text.forget_transaction(transaction_id)
2363 }
2364
    /// Retrieve a transaction from the buffer's undo history.
2366 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2367 self.text.get_transaction(transaction_id)
2368 }
2369
2370 /// Manually merge two transactions in the buffer's undo history.
2371 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2372 self.text.merge_transactions(transaction, destination);
2373 }
2374
2375 /// Waits for the buffer to receive operations with the given timestamps.
2376 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2377 &mut self,
2378 edit_ids: It,
2379 ) -> impl Future<Output = Result<()>> + use<It> {
2380 self.text.wait_for_edits(edit_ids)
2381 }
2382
2383 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2384 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2385 &mut self,
2386 anchors: It,
2387 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2388 self.text.wait_for_anchors(anchors)
2389 }
2390
2391 /// Waits for the buffer to receive operations up to the given version.
2392 pub fn wait_for_version(
2393 &mut self,
2394 version: clock::Global,
2395 ) -> impl Future<Output = Result<()>> + use<> {
2396 self.text.wait_for_version(version)
2397 }
2398
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2401 pub fn give_up_waiting(&mut self) {
2402 self.text.give_up_waiting();
2403 }
2404
2405 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2406 let mut rx = None;
2407 if !self.autoindent_requests.is_empty() {
2408 let channel = oneshot::channel();
2409 self.wait_for_autoindent_txs.push(channel.0);
2410 rx = Some(channel.1);
2411 }
2412 rx
2413 }
2414
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2416 pub fn set_active_selections(
2417 &mut self,
2418 selections: Arc<[Selection<Anchor>]>,
2419 line_mode: bool,
2420 cursor_shape: CursorShape,
2421 cx: &mut Context<Self>,
2422 ) {
2423 let lamport_timestamp = self.text.lamport_clock.tick();
2424 self.remote_selections.insert(
2425 self.text.replica_id(),
2426 SelectionSet {
2427 selections: selections.clone(),
2428 lamport_timestamp,
2429 line_mode,
2430 cursor_shape,
2431 },
2432 );
2433 self.send_operation(
2434 Operation::UpdateSelections {
2435 selections,
2436 line_mode,
2437 lamport_timestamp,
2438 cursor_shape,
2439 },
2440 true,
2441 cx,
2442 );
2443 self.non_text_state_update_count += 1;
2444 cx.notify();
2445 }
2446
2447 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2448 /// this replica.
2449 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2450 if self
2451 .remote_selections
2452 .get(&self.text.replica_id())
2453 .is_none_or(|set| !set.selections.is_empty())
2454 {
2455 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2456 }
2457 }
2458
2459 pub fn set_agent_selections(
2460 &mut self,
2461 selections: Arc<[Selection<Anchor>]>,
2462 line_mode: bool,
2463 cursor_shape: CursorShape,
2464 cx: &mut Context<Self>,
2465 ) {
2466 let lamport_timestamp = self.text.lamport_clock.tick();
2467 self.remote_selections.insert(
2468 ReplicaId::AGENT,
2469 SelectionSet {
2470 selections,
2471 lamport_timestamp,
2472 line_mode,
2473 cursor_shape,
2474 },
2475 );
2476 self.non_text_state_update_count += 1;
2477 cx.notify();
2478 }
2479
2480 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2481 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2482 }
2483
2484 /// Replaces the buffer's entire text.
2485 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2486 where
2487 T: Into<Arc<str>>,
2488 {
2489 self.autoindent_requests.clear();
2490 self.edit([(0..self.len(), text)], None, cx)
2491 }
2492
2493 /// Appends the given text to the end of the buffer.
2494 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2495 where
2496 T: Into<Arc<str>>,
2497 {
2498 self.edit([(self.len()..self.len(), text)], None, cx)
2499 }
2500
2501 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2502 /// delete, and a string of text to insert at that location.
2503 ///
2504 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2505 /// request for the edited ranges, which will be processed when the buffer finishes
2506 /// parsing.
2507 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
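    ///
    /// A brief, hedged sketch (assumes a `cx: &mut Context<Buffer>`, e.g. inside an
    /// `Entity::update` closure):
    ///
    /// ```ignore
    /// // Replace the first five bytes, then append an auto-indented function stub.
    /// buffer.edit([(0..5, "hello")], None, cx);
    /// let end = buffer.len();
    /// buffer.edit(
    ///     [(end..end, "\nfn foo() {}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```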
2510 pub fn edit<I, S, T>(
2511 &mut self,
2512 edits_iter: I,
2513 autoindent_mode: Option<AutoindentMode>,
2514 cx: &mut Context<Self>,
2515 ) -> Option<clock::Lamport>
2516 where
2517 I: IntoIterator<Item = (Range<S>, T)>,
2518 S: ToOffset,
2519 T: Into<Arc<str>>,
2520 {
2521 // Skip invalid edits and coalesce contiguous ones.
2522 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2523
2524 for (range, new_text) in edits_iter {
2525 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2526
2527 if range.start > range.end {
2528 mem::swap(&mut range.start, &mut range.end);
2529 }
2530 let new_text = new_text.into();
2531 if !new_text.is_empty() || !range.is_empty() {
2532 if let Some((prev_range, prev_text)) = edits.last_mut()
2533 && prev_range.end >= range.start
2534 {
2535 prev_range.end = cmp::max(prev_range.end, range.end);
2536 *prev_text = format!("{prev_text}{new_text}").into();
2537 } else {
2538 edits.push((range, new_text));
2539 }
2540 }
2541 }
2542 if edits.is_empty() {
2543 return None;
2544 }
2545
2546 self.start_transaction();
2547 self.pending_autoindent.take();
2548 let autoindent_request = autoindent_mode
2549 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2550
2551 let edit_operation = self.text.edit(edits.iter().cloned());
2552 let edit_id = edit_operation.timestamp();
2553
2554 if let Some((before_edit, mode)) = autoindent_request {
2555 let mut delta = 0isize;
2556 let mut previous_setting = None;
2557 let entries: Vec<_> = edits
2558 .into_iter()
2559 .enumerate()
2560 .zip(&edit_operation.as_edit().unwrap().new_text)
2561 .filter(|((_, (range, _)), _)| {
2562 let language = before_edit.language_at(range.start);
2563 let language_id = language.map(|l| l.id());
2564 if let Some((cached_language_id, auto_indent)) = previous_setting
2565 && cached_language_id == language_id
2566 {
2567 auto_indent
2568 } else {
2569 // The auto-indent setting is not present in editorconfigs, hence
2570 // we can avoid passing the file here.
2571 let auto_indent =
2572 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2573 previous_setting = Some((language_id, auto_indent));
2574 auto_indent
2575 }
2576 })
2577 .map(|((ix, (range, _)), new_text)| {
2578 let new_text_length = new_text.len();
2579 let old_start = range.start.to_point(&before_edit);
2580 let new_start = (delta + range.start as isize) as usize;
2581 let range_len = range.end - range.start;
2582 delta += new_text_length as isize - range_len as isize;
2583
2584 // Decide what range of the insertion to auto-indent, and whether
2585 // the first line of the insertion should be considered a newly-inserted line
2586 // or an edit to an existing line.
2587 let mut range_of_insertion_to_indent = 0..new_text_length;
2588 let mut first_line_is_new = true;
2589
2590 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2591 let old_line_end = before_edit.line_len(old_start.row);
2592
2593 if old_start.column > old_line_start {
2594 first_line_is_new = false;
2595 }
2596
2597 if !new_text.contains('\n')
2598 && (old_start.column + (range_len as u32) < old_line_end
2599 || old_line_end == old_line_start)
2600 {
2601 first_line_is_new = false;
2602 }
2603
2604 // When inserting text starting with a newline, avoid auto-indenting the
2605 // previous line.
2606 if new_text.starts_with('\n') {
2607 range_of_insertion_to_indent.start += 1;
2608 first_line_is_new = true;
2609 }
2610
2611 let mut original_indent_column = None;
2612 if let AutoindentMode::Block {
2613 original_indent_columns,
2614 } = &mode
2615 {
2616 original_indent_column = Some(if new_text.starts_with('\n') {
2617 indent_size_for_text(
2618 new_text[range_of_insertion_to_indent.clone()].chars(),
2619 )
2620 .len
2621 } else {
2622 original_indent_columns
2623 .get(ix)
2624 .copied()
2625 .flatten()
2626 .unwrap_or_else(|| {
2627 indent_size_for_text(
2628 new_text[range_of_insertion_to_indent.clone()].chars(),
2629 )
2630 .len
2631 })
2632 });
2633
2634 // Avoid auto-indenting the line after the edit.
2635 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2636 range_of_insertion_to_indent.end -= 1;
2637 }
2638 }
2639
2640 AutoindentRequestEntry {
2641 first_line_is_new,
2642 original_indent_column,
2643 indent_size: before_edit.language_indent_size_at(range.start, cx),
2644 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2645 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2646 }
2647 })
2648 .collect();
2649
2650 if !entries.is_empty() {
2651 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2652 before_edit,
2653 entries,
2654 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2655 ignore_empty_lines: false,
2656 }));
2657 }
2658 }
2659
2660 self.end_transaction(cx);
2661 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2662 Some(edit_id)
2663 }
2664
2665 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2666 self.was_changed();
2667
2668 if self.edits_since::<usize>(old_version).next().is_none() {
2669 return;
2670 }
2671
2672 self.reparse(cx, true);
2673 cx.emit(BufferEvent::Edited);
2674 if was_dirty != self.is_dirty() {
2675 cx.emit(BufferEvent::DirtyChanged);
2676 }
2677 cx.notify();
2678 }
2679
2680 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2681 where
2682 I: IntoIterator<Item = Range<T>>,
2683 T: ToOffset + Copy,
2684 {
2685 let before_edit = self.snapshot();
2686 let entries = ranges
2687 .into_iter()
2688 .map(|range| AutoindentRequestEntry {
2689 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2690 first_line_is_new: true,
2691 indent_size: before_edit.language_indent_size_at(range.start, cx),
2692 original_indent_column: None,
2693 })
2694 .collect();
2695 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2696 before_edit,
2697 entries,
2698 is_block_mode: false,
2699 ignore_empty_lines: true,
2700 }));
2701 self.request_autoindent(cx);
2702 }
2703
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
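    ///
    /// A hedged sketch:
    ///
    /// ```ignore
    /// // Open an empty, auto-indented line at the cursor, padded by blank lines
    /// // above and below when needed.
    /// let new_line_start = buffer.insert_empty_line(Point::new(3, 5), true, true, cx);
    /// ```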
2706 pub fn insert_empty_line(
2707 &mut self,
2708 position: impl ToPoint,
2709 space_above: bool,
2710 space_below: bool,
2711 cx: &mut Context<Self>,
2712 ) -> Point {
2713 let mut position = position.to_point(self);
2714
2715 self.start_transaction();
2716
2717 self.edit(
2718 [(position..position, "\n")],
2719 Some(AutoindentMode::EachLine),
2720 cx,
2721 );
2722
2723 if position.column > 0 {
2724 position += Point::new(1, 0);
2725 }
2726
2727 if !self.is_line_blank(position.row) {
2728 self.edit(
2729 [(position..position, "\n")],
2730 Some(AutoindentMode::EachLine),
2731 cx,
2732 );
2733 }
2734
2735 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2736 self.edit(
2737 [(position..position, "\n")],
2738 Some(AutoindentMode::EachLine),
2739 cx,
2740 );
2741 position.row += 1;
2742 }
2743
2744 if space_below
2745 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2746 {
2747 self.edit(
2748 [(position..position, "\n")],
2749 Some(AutoindentMode::EachLine),
2750 cx,
2751 );
2752 }
2753
2754 self.end_transaction(cx);
2755
2756 position
2757 }
2758
2759 /// Applies the given remote operations to the buffer.
2760 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2761 self.pending_autoindent.take();
2762 let was_dirty = self.is_dirty();
2763 let old_version = self.version.clone();
2764 let mut deferred_ops = Vec::new();
2765 let buffer_ops = ops
2766 .into_iter()
2767 .filter_map(|op| match op {
2768 Operation::Buffer(op) => Some(op),
2769 _ => {
2770 if self.can_apply_op(&op) {
2771 self.apply_op(op, cx);
2772 } else {
2773 deferred_ops.push(op);
2774 }
2775 None
2776 }
2777 })
2778 .collect::<Vec<_>>();
2779 for operation in buffer_ops.iter() {
2780 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2781 }
2782 self.text.apply_ops(buffer_ops);
2783 self.deferred_ops.insert(deferred_ops);
2784 self.flush_deferred_ops(cx);
2785 self.did_edit(&old_version, was_dirty, cx);
2786 // Notify independently of whether the buffer was edited as the operations could include a
2787 // selection update.
2788 cx.notify();
2789 }
2790
2791 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2792 let mut deferred_ops = Vec::new();
2793 for op in self.deferred_ops.drain().iter().cloned() {
2794 if self.can_apply_op(&op) {
2795 self.apply_op(op, cx);
2796 } else {
2797 deferred_ops.push(op);
2798 }
2799 }
2800 self.deferred_ops.insert(deferred_ops);
2801 }
2802
2803 pub fn has_deferred_ops(&self) -> bool {
2804 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2805 }
2806
2807 fn can_apply_op(&self, operation: &Operation) -> bool {
2808 match operation {
2809 Operation::Buffer(_) => {
2810 unreachable!("buffer operations should never be applied at this layer")
2811 }
2812 Operation::UpdateDiagnostics {
2813 diagnostics: diagnostic_set,
2814 ..
2815 } => diagnostic_set.iter().all(|diagnostic| {
2816 self.text.can_resolve(&diagnostic.range.start)
2817 && self.text.can_resolve(&diagnostic.range.end)
2818 }),
2819 Operation::UpdateSelections { selections, .. } => selections
2820 .iter()
2821 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2822 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2823 }
2824 }
2825
2826 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2827 match operation {
2828 Operation::Buffer(_) => {
2829 unreachable!("buffer operations should never be applied at this layer")
2830 }
2831 Operation::UpdateDiagnostics {
2832 server_id,
2833 diagnostics: diagnostic_set,
2834 lamport_timestamp,
2835 } => {
2836 let snapshot = self.snapshot();
2837 self.apply_diagnostic_update(
2838 server_id,
2839 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2840 lamport_timestamp,
2841 cx,
2842 );
2843 }
2844 Operation::UpdateSelections {
2845 selections,
2846 lamport_timestamp,
2847 line_mode,
2848 cursor_shape,
2849 } => {
2850 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2851 && set.lamport_timestamp > lamport_timestamp
2852 {
2853 return;
2854 }
2855
2856 self.remote_selections.insert(
2857 lamport_timestamp.replica_id,
2858 SelectionSet {
2859 selections,
2860 lamport_timestamp,
2861 line_mode,
2862 cursor_shape,
2863 },
2864 );
2865 self.text.lamport_clock.observe(lamport_timestamp);
2866 self.non_text_state_update_count += 1;
2867 }
2868 Operation::UpdateCompletionTriggers {
2869 triggers,
2870 lamport_timestamp,
2871 server_id,
2872 } => {
2873 if triggers.is_empty() {
2874 self.completion_triggers_per_language_server
2875 .remove(&server_id);
2876 self.completion_triggers = self
2877 .completion_triggers_per_language_server
2878 .values()
2879 .flat_map(|triggers| triggers.iter().cloned())
2880 .collect();
2881 } else {
2882 self.completion_triggers_per_language_server
2883 .insert(server_id, triggers.iter().cloned().collect());
2884 self.completion_triggers.extend(triggers);
2885 }
2886 self.text.lamport_clock.observe(lamport_timestamp);
2887 }
2888 Operation::UpdateLineEnding {
2889 line_ending,
2890 lamport_timestamp,
2891 } => {
2892 self.text.set_line_ending(line_ending);
2893 self.text.lamport_clock.observe(lamport_timestamp);
2894 }
2895 }
2896 }
2897
2898 fn apply_diagnostic_update(
2899 &mut self,
2900 server_id: LanguageServerId,
2901 diagnostics: DiagnosticSet,
2902 lamport_timestamp: clock::Lamport,
2903 cx: &mut Context<Self>,
2904 ) {
2905 if lamport_timestamp > self.diagnostics_timestamp {
2906 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2907 if diagnostics.is_empty() {
2908 if let Ok(ix) = ix {
2909 self.diagnostics.remove(ix);
2910 }
2911 } else {
2912 match ix {
2913 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2914 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2915 };
2916 }
2917 self.diagnostics_timestamp = lamport_timestamp;
2918 self.non_text_state_update_count += 1;
2919 self.text.lamport_clock.observe(lamport_timestamp);
2920 cx.notify();
2921 cx.emit(BufferEvent::DiagnosticsUpdated);
2922 }
2923 }
2924
2925 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2926 self.was_changed();
2927 cx.emit(BufferEvent::Operation {
2928 operation,
2929 is_local,
2930 });
2931 }
2932
2933 /// Removes the selections for a given peer.
2934 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2935 self.remote_selections.remove(&replica_id);
2936 cx.notify();
2937 }
2938
2939 /// Undoes the most recent transaction.
2940 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2941 let was_dirty = self.is_dirty();
2942 let old_version = self.version.clone();
2943
2944 if let Some((transaction_id, operation)) = self.text.undo() {
2945 self.send_operation(Operation::Buffer(operation), true, cx);
2946 self.did_edit(&old_version, was_dirty, cx);
2947 Some(transaction_id)
2948 } else {
2949 None
2950 }
2951 }
2952
2953 /// Manually undoes a specific transaction in the buffer's undo history.
2954 pub fn undo_transaction(
2955 &mut self,
2956 transaction_id: TransactionId,
2957 cx: &mut Context<Self>,
2958 ) -> bool {
2959 let was_dirty = self.is_dirty();
2960 let old_version = self.version.clone();
2961 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2962 self.send_operation(Operation::Buffer(operation), true, cx);
2963 self.did_edit(&old_version, was_dirty, cx);
2964 true
2965 } else {
2966 false
2967 }
2968 }
2969
2970 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2971 pub fn undo_to_transaction(
2972 &mut self,
2973 transaction_id: TransactionId,
2974 cx: &mut Context<Self>,
2975 ) -> bool {
2976 let was_dirty = self.is_dirty();
2977 let old_version = self.version.clone();
2978
2979 let operations = self.text.undo_to_transaction(transaction_id);
2980 let undone = !operations.is_empty();
2981 for operation in operations {
2982 self.send_operation(Operation::Buffer(operation), true, cx);
2983 }
2984 if undone {
2985 self.did_edit(&old_version, was_dirty, cx)
2986 }
2987 undone
2988 }
2989
2990 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2991 let was_dirty = self.is_dirty();
2992 let operation = self.text.undo_operations(counts);
2993 let old_version = self.version.clone();
2994 self.send_operation(Operation::Buffer(operation), true, cx);
2995 self.did_edit(&old_version, was_dirty, cx);
2996 }
2997
    /// Redoes the most recently undone transaction.
2999 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3000 let was_dirty = self.is_dirty();
3001 let old_version = self.version.clone();
3002
3003 if let Some((transaction_id, operation)) = self.text.redo() {
3004 self.send_operation(Operation::Buffer(operation), true, cx);
3005 self.did_edit(&old_version, was_dirty, cx);
3006 Some(transaction_id)
3007 } else {
3008 None
3009 }
3010 }
3011
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3013 pub fn redo_to_transaction(
3014 &mut self,
3015 transaction_id: TransactionId,
3016 cx: &mut Context<Self>,
3017 ) -> bool {
3018 let was_dirty = self.is_dirty();
3019 let old_version = self.version.clone();
3020
3021 let operations = self.text.redo_to_transaction(transaction_id);
3022 let redone = !operations.is_empty();
3023 for operation in operations {
3024 self.send_operation(Operation::Buffer(operation), true, cx);
3025 }
3026 if redone {
3027 self.did_edit(&old_version, was_dirty, cx)
3028 }
3029 redone
3030 }
3031
3032 /// Override current completion triggers with the user-provided completion triggers.
3033 pub fn set_completion_triggers(
3034 &mut self,
3035 server_id: LanguageServerId,
3036 triggers: BTreeSet<String>,
3037 cx: &mut Context<Self>,
3038 ) {
3039 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3040 if triggers.is_empty() {
3041 self.completion_triggers_per_language_server
3042 .remove(&server_id);
3043 self.completion_triggers = self
3044 .completion_triggers_per_language_server
3045 .values()
3046 .flat_map(|triggers| triggers.iter().cloned())
3047 .collect();
3048 } else {
3049 self.completion_triggers_per_language_server
3050 .insert(server_id, triggers.clone());
3051 self.completion_triggers.extend(triggers.iter().cloned());
3052 }
3053 self.send_operation(
3054 Operation::UpdateCompletionTriggers {
3055 triggers: triggers.into_iter().collect(),
3056 lamport_timestamp: self.completion_triggers_timestamp,
3057 server_id,
3058 },
3059 true,
3060 cx,
3061 );
3062 cx.notify();
3063 }
3064
3065 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3067 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3068 &self.completion_triggers
3069 }
3070
3071 /// Call this directly after performing edits to prevent the preview tab
3072 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3073 /// to return false until there are additional edits.
3074 pub fn refresh_preview(&mut self) {
3075 self.preview_version = self.version.clone();
3076 }
3077
3078 /// Whether we should preserve the preview status of a tab containing this buffer.
3079 pub fn preserve_preview(&self) -> bool {
3080 !self.has_edits_since(&self.preview_version)
3081 }
3082}
3083
3084#[doc(hidden)]
3085#[cfg(any(test, feature = "test-support"))]
3086impl Buffer {
3087 pub fn edit_via_marked_text(
3088 &mut self,
3089 marked_string: &str,
3090 autoindent_mode: Option<AutoindentMode>,
3091 cx: &mut Context<Self>,
3092 ) {
3093 let edits = self.edits_for_marked_text(marked_string);
3094 self.edit(edits, autoindent_mode, cx);
3095 }
3096
3097 pub fn set_group_interval(&mut self, group_interval: Duration) {
3098 self.text.set_group_interval(group_interval);
3099 }
3100
3101 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3102 where
3103 T: rand::Rng,
3104 {
3105 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3106 let mut last_end = None;
3107 for _ in 0..old_range_count {
3108 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3109 break;
3110 }
3111
3112 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3113 let mut range = self.random_byte_range(new_start, rng);
3114 if rng.random_bool(0.2) {
3115 mem::swap(&mut range.start, &mut range.end);
3116 }
3117 last_end = Some(range.end);
3118
3119 let new_text_len = rng.random_range(0..10);
3120 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3121 new_text = new_text.to_uppercase();
3122
3123 edits.push((range, new_text));
3124 }
3125 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3126 self.edit(edits, None, cx);
3127 }
3128
3129 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3130 let was_dirty = self.is_dirty();
3131 let old_version = self.version.clone();
3132
3133 let ops = self.text.randomly_undo_redo(rng);
3134 if !ops.is_empty() {
3135 for op in ops {
3136 self.send_operation(Operation::Buffer(op), true, cx);
3137 self.did_edit(&old_version, was_dirty, cx);
3138 }
3139 }
3140 }
3141}
3142
3143impl EventEmitter<BufferEvent> for Buffer {}
3144
3145impl Deref for Buffer {
3146 type Target = TextBuffer;
3147
3148 fn deref(&self) -> &Self::Target {
3149 &self.text
3150 }
3151}
3152
3153impl BufferSnapshot {
3154 /// Returns [`IndentSize`] for a given line that respects user settings and
3155 /// language preferences.
3156 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3157 indent_size_for_line(self, row)
3158 }
3159
3160 /// Returns [`IndentSize`] for a given position that respects user settings
3161 /// and language preferences.
3162 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3163 let settings = language_settings(
3164 self.language_at(position).map(|l| l.name()),
3165 self.file(),
3166 cx,
3167 );
3168 if settings.hard_tabs {
3169 IndentSize::tab()
3170 } else {
3171 IndentSize::spaces(settings.tab_size.get())
3172 }
3173 }
3174
3175 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3176 /// is passed in as `single_indent_size`.
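    ///
    /// A minimal sketch, assuming a four-space indent unit:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(10..13, IndentSize::spaces(4));
    /// for (row, indent) in &indents {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```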
3177 pub fn suggested_indents(
3178 &self,
3179 rows: impl Iterator<Item = u32>,
3180 single_indent_size: IndentSize,
3181 ) -> BTreeMap<u32, IndentSize> {
3182 let mut result = BTreeMap::new();
3183
3184 for row_range in contiguous_ranges(rows, 10) {
3185 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3186 Some(suggestions) => suggestions,
3187 _ => break,
3188 };
3189
3190 for (row, suggestion) in row_range.zip(suggestions) {
3191 let indent_size = if let Some(suggestion) = suggestion {
3192 result
3193 .get(&suggestion.basis_row)
3194 .copied()
3195 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3196 .with_delta(suggestion.delta, single_indent_size)
3197 } else {
3198 self.indent_size_for_line(row)
3199 };
3200
3201 result.insert(row, indent_size);
3202 }
3203 }
3204
3205 result
3206 }
3207
3208 fn suggest_autoindents(
3209 &self,
3210 row_range: Range<u32>,
3211 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3212 let config = &self.language.as_ref()?.config;
3213 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3214
3215 #[derive(Debug, Clone)]
3216 struct StartPosition {
3217 start: Point,
3218 suffix: SharedString,
3219 language: Arc<Language>,
3220 }
3221
3222 // Find the suggested indentation ranges based on the syntax tree.
3223 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3224 let end = Point::new(row_range.end, 0);
3225 let range = (start..end).to_offset(&self.text);
3226 let mut matches = self.syntax.matches_with_options(
3227 range.clone(),
3228 &self.text,
3229 TreeSitterOptions {
3230 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3231 max_start_depth: None,
3232 },
3233 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3234 );
3235 let indent_configs = matches
3236 .grammars()
3237 .iter()
3238 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3239 .collect::<Vec<_>>();
3240
3241 let mut indent_ranges = Vec::<Range<Point>>::new();
3242 let mut start_positions = Vec::<StartPosition>::new();
3243 let mut outdent_positions = Vec::<Point>::new();
3244 while let Some(mat) = matches.peek() {
3245 let mut start: Option<Point> = None;
3246 let mut end: Option<Point> = None;
3247
3248 let config = indent_configs[mat.grammar_index];
3249 for capture in mat.captures {
3250 if capture.index == config.indent_capture_ix {
3251 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3252 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3253 } else if Some(capture.index) == config.start_capture_ix {
3254 start = Some(Point::from_ts_point(capture.node.end_position()));
3255 } else if Some(capture.index) == config.end_capture_ix {
3256 end = Some(Point::from_ts_point(capture.node.start_position()));
3257 } else if Some(capture.index) == config.outdent_capture_ix {
3258 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3259 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3260 start_positions.push(StartPosition {
3261 start: Point::from_ts_point(capture.node.start_position()),
3262 suffix: suffix.clone(),
3263 language: mat.language.clone(),
3264 });
3265 }
3266 }
3267
3268 matches.advance();
3269 if let Some((start, end)) = start.zip(end) {
3270 if start.row == end.row {
3271 continue;
3272 }
3273 let range = start..end;
3274 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3275 Err(ix) => indent_ranges.insert(ix, range),
3276 Ok(ix) => {
3277 let prev_range = &mut indent_ranges[ix];
3278 prev_range.end = prev_range.end.max(range.end);
3279 }
3280 }
3281 }
3282 }
3283
3284 let mut error_ranges = Vec::<Range<Point>>::new();
3285 let mut matches = self
3286 .syntax
3287 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3288 while let Some(mat) = matches.peek() {
3289 let node = mat.captures[0].node;
3290 let start = Point::from_ts_point(node.start_position());
3291 let end = Point::from_ts_point(node.end_position());
3292 let range = start..end;
3293 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3294 Ok(ix) | Err(ix) => ix,
3295 };
3296 let mut end_ix = ix;
3297 while let Some(existing_range) = error_ranges.get(end_ix) {
3298 if existing_range.end < end {
3299 end_ix += 1;
3300 } else {
3301 break;
3302 }
3303 }
3304 error_ranges.splice(ix..end_ix, [range]);
3305 matches.advance();
3306 }
3307
3308 outdent_positions.sort();
3309 for outdent_position in outdent_positions {
3310 // find the innermost indent range containing this outdent_position
3311 // set its end to the outdent position
3312 if let Some(range_to_truncate) = indent_ranges
3313 .iter_mut()
3314 .rfind(|indent_range| indent_range.contains(&outdent_position))
3315 {
3316 range_to_truncate.end = outdent_position;
3317 }
3318 }
3319
3320 start_positions.sort_by_key(|b| b.start);
3321
        // Find the suggested indentation increases and decreases based on regexes.
3323 let mut regex_outdent_map = HashMap::default();
3324 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3325 let mut start_positions_iter = start_positions.iter().peekable();
3326
3327 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3328 self.for_each_line(
3329 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3330 ..Point::new(row_range.end, 0),
3331 |row, line| {
3332 let indent_len = self.indent_size_for_line(row).len;
3333 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3334 let row_language_config = row_language
3335 .as_ref()
3336 .map(|lang| lang.config())
3337 .unwrap_or(config);
3338
3339 if row_language_config
3340 .decrease_indent_pattern
3341 .as_ref()
3342 .is_some_and(|regex| regex.is_match(line))
3343 {
3344 indent_change_rows.push((row, Ordering::Less));
3345 }
3346 if row_language_config
3347 .increase_indent_pattern
3348 .as_ref()
3349 .is_some_and(|regex| regex.is_match(line))
3350 {
3351 indent_change_rows.push((row + 1, Ordering::Greater));
3352 }
3353 while let Some(pos) = start_positions_iter.peek() {
3354 if pos.start.row < row {
3355 let pos = start_positions_iter.next().unwrap().clone();
3356 last_seen_suffix
3357 .entry(pos.suffix.to_string())
3358 .or_default()
3359 .push(pos);
3360 } else {
3361 break;
3362 }
3363 }
3364 for rule in &row_language_config.decrease_indent_patterns {
3365 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3366 let row_start_column = self.indent_size_for_line(row).len;
3367 let basis_row = rule
3368 .valid_after
3369 .iter()
3370 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3371 .flatten()
3372 .filter(|pos| {
3373 row_language
3374 .as_ref()
3375 .or(self.language.as_ref())
3376 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3377 })
3378 .filter(|pos| pos.start.column <= row_start_column)
3379 .max_by_key(|pos| pos.start.row);
3380 if let Some(outdent_to) = basis_row {
3381 regex_outdent_map.insert(row, outdent_to.start.row);
3382 }
3383 break;
3384 }
3385 }
3386 },
3387 );
3388
3389 let mut indent_changes = indent_change_rows.into_iter().peekable();
3390 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3391 prev_non_blank_row.unwrap_or(0)
3392 } else {
3393 row_range.start.saturating_sub(1)
3394 };
3395
3396 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3397 Some(row_range.map(move |row| {
3398 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3399
3400 let mut indent_from_prev_row = false;
3401 let mut outdent_from_prev_row = false;
3402 let mut outdent_to_row = u32::MAX;
3403 let mut from_regex = false;
3404
3405 while let Some((indent_row, delta)) = indent_changes.peek() {
3406 match indent_row.cmp(&row) {
3407 Ordering::Equal => match delta {
3408 Ordering::Less => {
3409 from_regex = true;
3410 outdent_from_prev_row = true
3411 }
3412 Ordering::Greater => {
3413 indent_from_prev_row = true;
3414 from_regex = true
3415 }
3416 _ => {}
3417 },
3418
3419 Ordering::Greater => break,
3420 Ordering::Less => {}
3421 }
3422
3423 indent_changes.next();
3424 }
3425
3426 for range in &indent_ranges {
3427 if range.start.row >= row {
3428 break;
3429 }
3430 if range.start.row == prev_row && range.end > row_start {
3431 indent_from_prev_row = true;
3432 }
3433 if range.end > prev_row_start && range.end <= row_start {
3434 outdent_to_row = outdent_to_row.min(range.start.row);
3435 }
3436 }
3437
3438 if let Some(basis_row) = regex_outdent_map.get(&row) {
3439 indent_from_prev_row = false;
3440 outdent_to_row = *basis_row;
3441 from_regex = true;
3442 }
3443
3444 let within_error = error_ranges
3445 .iter()
3446 .any(|e| e.start.row < row && e.end > row_start);
3447
3448 let suggestion = if outdent_to_row == prev_row
3449 || (outdent_from_prev_row && indent_from_prev_row)
3450 {
3451 Some(IndentSuggestion {
3452 basis_row: prev_row,
3453 delta: Ordering::Equal,
3454 within_error: within_error && !from_regex,
3455 })
3456 } else if indent_from_prev_row {
3457 Some(IndentSuggestion {
3458 basis_row: prev_row,
3459 delta: Ordering::Greater,
3460 within_error: within_error && !from_regex,
3461 })
3462 } else if outdent_to_row < prev_row {
3463 Some(IndentSuggestion {
3464 basis_row: outdent_to_row,
3465 delta: Ordering::Equal,
3466 within_error: within_error && !from_regex,
3467 })
3468 } else if outdent_from_prev_row {
3469 Some(IndentSuggestion {
3470 basis_row: prev_row,
3471 delta: Ordering::Less,
3472 within_error: within_error && !from_regex,
3473 })
3474 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3475 {
3476 Some(IndentSuggestion {
3477 basis_row: prev_row,
3478 delta: Ordering::Equal,
3479 within_error: within_error && !from_regex,
3480 })
3481 } else {
3482 None
3483 };
3484
3485 prev_row = row;
3486 prev_row_start = row_start;
3487 suggestion
3488 }))
3489 }
3490
3491 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3492 while row > 0 {
3493 row -= 1;
3494 if !self.is_line_blank(row) {
3495 return Some(row);
3496 }
3497 }
3498 None
3499 }
3500
3501 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3502 let captures = self.syntax.captures(range, &self.text, |grammar| {
3503 grammar
3504 .highlights_config
3505 .as_ref()
3506 .map(|config| &config.query)
3507 });
3508 let highlight_maps = captures
3509 .grammars()
3510 .iter()
3511 .map(|grammar| grammar.highlight_map())
3512 .collect();
3513 (captures, highlight_maps)
3514 }
3515
3516 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3517 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3518 /// returned in chunks where each chunk has a single syntax highlighting style and
3519 /// diagnostic status.
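    ///
    /// A minimal usage sketch (not compiled as a doctest; assumes a `snapshot` of this
    /// buffer type is in scope):
    ///
    /// ```ignore
    /// // Iterate over the syntax-highlighted chunks of the whole buffer.
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     println!("{} bytes, highlighted: {}", chunk.text.len(), chunk.syntax_highlight_id.is_some());
    /// }
    /// ```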
3520 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3521 let range = range.start.to_offset(self)..range.end.to_offset(self);
3522
3523 let mut syntax = None;
3524 if language_aware {
3525 syntax = Some(self.get_highlights(range.clone()));
3526 }
3527 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3528 let diagnostics = language_aware;
3529 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3530 }
3531
3532 pub fn highlighted_text_for_range<T: ToOffset>(
3533 &self,
3534 range: Range<T>,
3535 override_style: Option<HighlightStyle>,
3536 syntax_theme: &SyntaxTheme,
3537 ) -> HighlightedText {
3538 HighlightedText::from_buffer_range(
3539 range,
3540 &self.text,
3541 &self.syntax,
3542 override_style,
3543 syntax_theme,
3544 )
3545 }
3546
3547 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3549 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3550 let mut line = String::new();
3551 let mut row = range.start.row;
3552 for chunk in self
3553 .as_rope()
3554 .chunks_in_range(range.to_offset(self))
3555 .chain(["\n"])
3556 {
3557 for (newline_ix, text) in chunk.split('\n').enumerate() {
3558 if newline_ix > 0 {
3559 callback(row, &line);
3560 row += 1;
3561 line.clear();
3562 }
3563 line.push_str(text);
3564 }
3565 }
3566 }
3567
3568 /// Iterates over every [`SyntaxLayer`] in the buffer.
3569 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3570 self.syntax_layers_for_range(0..self.len(), true)
3571 }
3572
3573 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3574 let offset = position.to_offset(self);
3575 self.syntax_layers_for_range(offset..offset, false)
3576 .filter(|l| {
3577 if let Some(ranges) = l.included_sub_ranges {
3578 ranges.iter().any(|range| {
3579 let start = range.start.to_offset(self);
3580 start <= offset && {
3581 let end = range.end.to_offset(self);
3582 offset < end
3583 }
3584 })
3585 } else {
3586 l.node().start_byte() <= offset && l.node().end_byte() > offset
3587 }
3588 })
3589 .last()
3590 }
3591
3592 pub fn syntax_layers_for_range<D: ToOffset>(
3593 &self,
3594 range: Range<D>,
3595 include_hidden: bool,
3596 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3597 self.syntax
3598 .layers_for_range(range, &self.text, include_hidden)
3599 }
3600
3601 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3602 &self,
3603 range: Range<D>,
3604 ) -> Option<SyntaxLayer<'_>> {
3605 let range = range.to_offset(self);
3606 self.syntax
3607 .layers_for_range(range, &self.text, false)
3608 .max_by(|a, b| {
3609 if a.depth != b.depth {
3610 a.depth.cmp(&b.depth)
3611 } else if a.offset.0 != b.offset.0 {
3612 a.offset.0.cmp(&b.offset.0)
3613 } else {
3614 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3615 }
3616 })
3617 }
3618
3619 /// Returns the main [`Language`].
3620 pub fn language(&self) -> Option<&Arc<Language>> {
3621 self.language.as_ref()
3622 }
3623
3624 /// Returns the [`Language`] at the given location.
3625 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3626 self.syntax_layer_at(position)
3627 .map(|info| info.language)
3628 .or(self.language.as_ref())
3629 }
3630
3631 /// Returns the settings for the language at the given location.
3632 pub fn settings_at<'a, D: ToOffset>(
3633 &'a self,
3634 position: D,
3635 cx: &'a App,
3636 ) -> Cow<'a, LanguageSettings> {
3637 language_settings(
3638 self.language_at(position).map(|l| l.name()),
3639 self.file.as_ref(),
3640 cx,
3641 )
3642 }
3643
3644 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3645 CharClassifier::new(self.language_scope_at(point))
3646 }
3647
3648 /// Returns the [`LanguageScope`] at the given location.
3649 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3650 let offset = position.to_offset(self);
3651 let mut scope = None;
3652 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3653
3654 // Use the layer that has the smallest node intersecting the given point.
3655 for layer in self
3656 .syntax
3657 .layers_for_range(offset..offset, &self.text, false)
3658 {
3659 let mut cursor = layer.node().walk();
3660
3661 let mut range = None;
3662 loop {
3663 let child_range = cursor.node().byte_range();
3664 if !child_range.contains(&offset) {
3665 break;
3666 }
3667
3668 range = Some(child_range);
3669 if cursor.goto_first_child_for_byte(offset).is_none() {
3670 break;
3671 }
3672 }
3673
3674 if let Some(range) = range
3675 && smallest_range_and_depth.as_ref().is_none_or(
3676 |(smallest_range, smallest_range_depth)| {
3677 if layer.depth > *smallest_range_depth {
3678 true
3679 } else if layer.depth == *smallest_range_depth {
3680 range.len() < smallest_range.len()
3681 } else {
3682 false
3683 }
3684 },
3685 )
3686 {
3687 smallest_range_and_depth = Some((range, layer.depth));
3688 scope = Some(LanguageScope {
3689 language: layer.language.clone(),
3690 override_id: layer.override_id(offset, &self.text),
3691 });
3692 }
3693 }
3694
3695 scope.or_else(|| {
3696 self.language.clone().map(|language| LanguageScope {
3697 language,
3698 override_id: None,
3699 })
3700 })
3701 }
3702
3703 /// Returns a tuple of the range and character kind of the word
3704 /// surrounding the given position.
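    ///
    /// A minimal sketch (not compiled as a doctest; `cursor_offset` is a hypothetical
    /// byte offset inside the buffer):
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(cursor_offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```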
3705 pub fn surrounding_word<T: ToOffset>(
3706 &self,
3707 start: T,
3708 scope_context: Option<CharScopeContext>,
3709 ) -> (Range<usize>, Option<CharKind>) {
3710 let mut start = start.to_offset(self);
3711 let mut end = start;
3712 let mut next_chars = self.chars_at(start).take(128).peekable();
3713 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3714
3715 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3716 let word_kind = cmp::max(
3717 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3718 next_chars.peek().copied().map(|c| classifier.kind(c)),
3719 );
3720
3721 for ch in prev_chars {
3722 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3723 start -= ch.len_utf8();
3724 } else {
3725 break;
3726 }
3727 }
3728
3729 for ch in next_chars {
3730 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3731 end += ch.len_utf8();
3732 } else {
3733 break;
3734 }
3735 }
3736
3737 (start..end, word_kind)
3738 }
3739
    /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is `true`, the node found must be larger than the query range.
    ///
    /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
3745 fn goto_node_enclosing_range(
3746 cursor: &mut tree_sitter::TreeCursor,
3747 query_range: &Range<usize>,
3748 require_larger: bool,
3749 ) -> bool {
3750 let mut ascending = false;
3751 loop {
3752 let mut range = cursor.node().byte_range();
3753 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3756 if range.start > query_range.start {
3757 cursor.goto_previous_sibling();
3758 range = cursor.node().byte_range();
3759 }
3760 } else {
3761 // When the query range is non-empty and the current node ends exactly at the start,
3762 // move to the next sibling to find a node that extends beyond the start.
3763 if range.end == query_range.start {
3764 cursor.goto_next_sibling();
3765 range = cursor.node().byte_range();
3766 }
3767 }
3768
3769 let encloses = range.contains_inclusive(query_range)
3770 && (!require_larger || range.len() > query_range.len());
3771 if !encloses {
3772 ascending = true;
3773 if !cursor.goto_parent() {
3774 return false;
3775 }
3776 continue;
3777 } else if ascending {
3778 return true;
3779 }
3780
3781 // Descend into the current node.
3782 if cursor
3783 .goto_first_child_for_byte(query_range.start)
3784 .is_none()
3785 {
3786 return true;
3787 }
3788 }
3789 }
3790
3791 pub fn syntax_ancestor<'a, T: ToOffset>(
3792 &'a self,
3793 range: Range<T>,
3794 ) -> Option<tree_sitter::Node<'a>> {
3795 let range = range.start.to_offset(self)..range.end.to_offset(self);
3796 let mut result: Option<tree_sitter::Node<'a>> = None;
3797 for layer in self
3798 .syntax
3799 .layers_for_range(range.clone(), &self.text, true)
3800 {
3801 let mut cursor = layer.node().walk();
3802
3803 // Find the node that both contains the range and is larger than it.
3804 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3805 continue;
3806 }
3807
3808 let left_node = cursor.node();
3809 let mut layer_result = left_node;
3810
3811 // For an empty range, try to find another node immediately to the right of the range.
3812 if left_node.end_byte() == range.start {
3813 let mut right_node = None;
3814 while !cursor.goto_next_sibling() {
3815 if !cursor.goto_parent() {
3816 break;
3817 }
3818 }
3819
3820 while cursor.node().start_byte() == range.start {
3821 right_node = Some(cursor.node());
3822 if !cursor.goto_first_child() {
3823 break;
3824 }
3825 }
3826
3827 // If there is a candidate node on both sides of the (empty) range, then
3828 // decide between the two by favoring a named node over an anonymous token.
3829 // If both nodes are the same in that regard, favor the right one.
3830 if let Some(right_node) = right_node
3831 && (right_node.is_named() || !left_node.is_named())
3832 {
3833 layer_result = right_node;
3834 }
3835 }
3836
3837 if let Some(previous_result) = &result
3838 && previous_result.byte_range().len() < layer_result.byte_range().len()
3839 {
3840 continue;
3841 }
3842 result = Some(layer_result);
3843 }
3844
3845 result
3846 }
3847
3848 /// Find the previous sibling syntax node at the given range.
3849 ///
3850 /// This function locates the syntax node that precedes the node containing
3851 /// the given range. It searches hierarchically by:
3852 /// 1. Finding the node that contains the given range
3853 /// 2. Looking for the previous sibling at the same tree level
3854 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3855 ///
3856 /// Returns `None` if there is no previous sibling at any ancestor level.
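    ///
    /// A minimal sketch (not compiled as a doctest; the byte range is hypothetical):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(40usize..64usize) {
    ///     println!("previous sibling kind: {}", node.kind());
    /// }
    /// ```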
3857 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3858 &'a self,
3859 range: Range<T>,
3860 ) -> Option<tree_sitter::Node<'a>> {
3861 let range = range.start.to_offset(self)..range.end.to_offset(self);
3862 let mut result: Option<tree_sitter::Node<'a>> = None;
3863
3864 for layer in self
3865 .syntax
3866 .layers_for_range(range.clone(), &self.text, true)
3867 {
3868 let mut cursor = layer.node().walk();
3869
3870 // Find the node that contains the range
3871 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3872 continue;
3873 }
3874
3875 // Look for the previous sibling, moving up ancestor levels if needed
3876 loop {
3877 if cursor.goto_previous_sibling() {
3878 let layer_result = cursor.node();
3879
3880 if let Some(previous_result) = &result {
3881 if previous_result.byte_range().end < layer_result.byte_range().end {
3882 continue;
3883 }
3884 }
3885 result = Some(layer_result);
3886 break;
3887 }
3888
3889 // No sibling found at this level, try moving up to parent
3890 if !cursor.goto_parent() {
3891 break;
3892 }
3893 }
3894 }
3895
3896 result
3897 }
3898
3899 /// Find the next sibling syntax node at the given range.
3900 ///
3901 /// This function locates the syntax node that follows the node containing
3902 /// the given range. It searches hierarchically by:
3903 /// 1. Finding the node that contains the given range
3904 /// 2. Looking for the next sibling at the same tree level
3905 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3906 ///
3907 /// Returns `None` if there is no next sibling at any ancestor level.
3908 pub fn syntax_next_sibling<'a, T: ToOffset>(
3909 &'a self,
3910 range: Range<T>,
3911 ) -> Option<tree_sitter::Node<'a>> {
3912 let range = range.start.to_offset(self)..range.end.to_offset(self);
3913 let mut result: Option<tree_sitter::Node<'a>> = None;
3914
3915 for layer in self
3916 .syntax
3917 .layers_for_range(range.clone(), &self.text, true)
3918 {
3919 let mut cursor = layer.node().walk();
3920
3921 // Find the node that contains the range
3922 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3923 continue;
3924 }
3925
3926 // Look for the next sibling, moving up ancestor levels if needed
3927 loop {
3928 if cursor.goto_next_sibling() {
3929 let layer_result = cursor.node();
3930
3931 if let Some(previous_result) = &result {
3932 if previous_result.byte_range().start > layer_result.byte_range().start {
3933 continue;
3934 }
3935 }
3936 result = Some(layer_result);
3937 break;
3938 }
3939
3940 // No sibling found at this level, try moving up to parent
3941 if !cursor.goto_parent() {
3942 break;
3943 }
3944 }
3945 }
3946
3947 result
3948 }
3949
    /// Returns the root syntax node within the given row.
3951 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3952 let start_offset = position.to_offset(self);
3953
3954 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3955
3956 let layer = self
3957 .syntax
3958 .layers_for_range(start_offset..start_offset, &self.text, true)
3959 .next()?;
3960
3961 let mut cursor = layer.node().walk();
3962
3963 // Descend to the first leaf that touches the start of the range.
3964 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3965 if cursor.node().end_byte() == start_offset {
3966 cursor.goto_next_sibling();
3967 }
3968 }
3969
3970 // Ascend to the root node within the same row.
3971 while cursor.goto_parent() {
3972 if cursor.node().start_position().row != row {
3973 break;
3974 }
3975 }
3976
3977 Some(cursor.node())
3978 }
3979
3980 /// Returns the outline for the buffer.
3981 ///
3982 /// This method allows passing an optional [`SyntaxTheme`] to
3983 /// syntax-highlight the returned symbols.
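    ///
    /// A minimal sketch (not compiled as a doctest; passing `Some(theme)` instead of
    /// `None` also populates per-item highlight ranges):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// ```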
3984 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3985 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3986 }
3987
3988 /// Returns all the symbols that contain the given position.
3989 ///
3990 /// This method allows passing an optional [`SyntaxTheme`] to
3991 /// syntax-highlight the returned symbols.
3992 pub fn symbols_containing<T: ToOffset>(
3993 &self,
3994 position: T,
3995 theme: Option<&SyntaxTheme>,
3996 ) -> Vec<OutlineItem<Anchor>> {
3997 let position = position.to_offset(self);
3998 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3999 let end = self.clip_offset(position + 1, Bias::Right);
4000 let mut items = self.outline_items_containing(start..end, false, theme);
4001 let mut prev_depth = None;
4002 items.retain(|item| {
4003 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4004 prev_depth = Some(item.depth);
4005 result
4006 });
4007 items
4008 }
4009
4010 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4011 let range = range.to_offset(self);
4012 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4013 grammar.outline_config.as_ref().map(|c| &c.query)
4014 });
4015 let configs = matches
4016 .grammars()
4017 .iter()
4018 .map(|g| g.outline_config.as_ref().unwrap())
4019 .collect::<Vec<_>>();
4020
4021 while let Some(mat) = matches.peek() {
4022 let config = &configs[mat.grammar_index];
4023 let containing_item_node = maybe!({
4024 let item_node = mat.captures.iter().find_map(|cap| {
4025 if cap.index == config.item_capture_ix {
4026 Some(cap.node)
4027 } else {
4028 None
4029 }
4030 })?;
4031
4032 let item_byte_range = item_node.byte_range();
4033 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4034 None
4035 } else {
4036 Some(item_node)
4037 }
4038 });
4039
4040 if let Some(item_node) = containing_item_node {
4041 return Some(
4042 Point::from_ts_point(item_node.start_position())
4043 ..Point::from_ts_point(item_node.end_position()),
4044 );
4045 }
4046
4047 matches.advance();
4048 }
4049 None
4050 }
4051
4052 pub fn outline_items_containing<T: ToOffset>(
4053 &self,
4054 range: Range<T>,
4055 include_extra_context: bool,
4056 theme: Option<&SyntaxTheme>,
4057 ) -> Vec<OutlineItem<Anchor>> {
4058 self.outline_items_containing_internal(
4059 range,
4060 include_extra_context,
4061 theme,
4062 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4063 )
4064 }
4065
4066 pub fn outline_items_as_points_containing<T: ToOffset>(
4067 &self,
4068 range: Range<T>,
4069 include_extra_context: bool,
4070 theme: Option<&SyntaxTheme>,
4071 ) -> Vec<OutlineItem<Point>> {
4072 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4073 range
4074 })
4075 }
4076
4077 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4078 &self,
4079 range: Range<T>,
4080 include_extra_context: bool,
4081 theme: Option<&SyntaxTheme>,
4082 ) -> Vec<OutlineItem<usize>> {
4083 self.outline_items_containing_internal(
4084 range,
4085 include_extra_context,
4086 theme,
4087 |buffer, range| range.to_offset(buffer),
4088 )
4089 }
4090
4091 fn outline_items_containing_internal<T: ToOffset, U>(
4092 &self,
4093 range: Range<T>,
4094 include_extra_context: bool,
4095 theme: Option<&SyntaxTheme>,
4096 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4097 ) -> Vec<OutlineItem<U>> {
4098 let range = range.to_offset(self);
4099 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4100 grammar.outline_config.as_ref().map(|c| &c.query)
4101 });
4102
4103 let mut items = Vec::new();
4104 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4105 while let Some(mat) = matches.peek() {
4106 let config = matches.grammars()[mat.grammar_index]
4107 .outline_config
4108 .as_ref()
4109 .unwrap();
4110 if let Some(item) =
4111 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4112 {
4113 items.push(item);
4114 } else if let Some(capture) = mat
4115 .captures
4116 .iter()
4117 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4118 {
4119 let capture_range = capture.node.start_position()..capture.node.end_position();
4120 let mut capture_row_range =
4121 capture_range.start.row as u32..capture_range.end.row as u32;
4122 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4123 {
4124 capture_row_range.end -= 1;
4125 }
4126 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4127 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4128 last_row_range.end = capture_row_range.end;
4129 } else {
4130 annotation_row_ranges.push(capture_row_range);
4131 }
4132 } else {
4133 annotation_row_ranges.push(capture_row_range);
4134 }
4135 }
4136 matches.advance();
4137 }
4138
4139 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4140
        // Assign depths based on containment relationships and convert the ranges
        // via the provided callback.
4142 let mut item_ends_stack = Vec::<Point>::new();
4143 let mut anchor_items = Vec::new();
4144 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4145 for item in items {
4146 while let Some(last_end) = item_ends_stack.last().copied() {
4147 if last_end < item.range.end {
4148 item_ends_stack.pop();
4149 } else {
4150 break;
4151 }
4152 }
4153
4154 let mut annotation_row_range = None;
4155 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4156 let row_preceding_item = item.range.start.row.saturating_sub(1);
4157 if next_annotation_row_range.end < row_preceding_item {
4158 annotation_row_ranges.next();
4159 } else {
4160 if next_annotation_row_range.end == row_preceding_item {
4161 annotation_row_range = Some(next_annotation_row_range.clone());
4162 annotation_row_ranges.next();
4163 }
4164 break;
4165 }
4166 }
4167
4168 anchor_items.push(OutlineItem {
4169 depth: item_ends_stack.len(),
4170 range: range_callback(self, item.range.clone()),
4171 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4172 text: item.text,
4173 highlight_ranges: item.highlight_ranges,
4174 name_ranges: item.name_ranges,
4175 body_range: item.body_range.map(|r| range_callback(self, r)),
4176 annotation_range: annotation_row_range.map(|annotation_range| {
4177 let point_range = Point::new(annotation_range.start, 0)
4178 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4179 range_callback(self, point_range)
4180 }),
4181 });
4182 item_ends_stack.push(item.range.end);
4183 }
4184
4185 anchor_items
4186 }
4187
4188 fn next_outline_item(
4189 &self,
4190 config: &OutlineConfig,
4191 mat: &SyntaxMapMatch,
4192 range: &Range<usize>,
4193 include_extra_context: bool,
4194 theme: Option<&SyntaxTheme>,
4195 ) -> Option<OutlineItem<Point>> {
4196 let item_node = mat.captures.iter().find_map(|cap| {
4197 if cap.index == config.item_capture_ix {
4198 Some(cap.node)
4199 } else {
4200 None
4201 }
4202 })?;
4203
4204 let item_byte_range = item_node.byte_range();
4205 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4206 return None;
4207 }
4208 let item_point_range = Point::from_ts_point(item_node.start_position())
4209 ..Point::from_ts_point(item_node.end_position());
4210
4211 let mut open_point = None;
4212 let mut close_point = None;
4213
4214 let mut buffer_ranges = Vec::new();
4215 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4216 let mut range = node.start_byte()..node.end_byte();
4217 let start = node.start_position();
4218 if node.end_position().row > start.row {
4219 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4220 }
4221
4222 if !range.is_empty() {
4223 buffer_ranges.push((range, node_is_name));
4224 }
4225 };
4226
4227 for capture in mat.captures {
4228 if capture.index == config.name_capture_ix {
4229 add_to_buffer_ranges(capture.node, true);
4230 } else if Some(capture.index) == config.context_capture_ix
4231 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4232 {
4233 add_to_buffer_ranges(capture.node, false);
4234 } else {
4235 if Some(capture.index) == config.open_capture_ix {
4236 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4237 } else if Some(capture.index) == config.close_capture_ix {
4238 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4239 }
4240 }
4241 }
4242
4243 if buffer_ranges.is_empty() {
4244 return None;
4245 }
4246 let source_range_for_text =
4247 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4248
4249 let mut text = String::new();
4250 let mut highlight_ranges = Vec::new();
4251 let mut name_ranges = Vec::new();
4252 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4253 let mut last_buffer_range_end = 0;
4254 for (buffer_range, is_name) in buffer_ranges {
4255 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4256 if space_added {
4257 text.push(' ');
4258 }
4259 let before_append_len = text.len();
4260 let mut offset = buffer_range.start;
4261 chunks.seek(buffer_range.clone());
4262 for mut chunk in chunks.by_ref() {
4263 if chunk.text.len() > buffer_range.end - offset {
4264 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4265 offset = buffer_range.end;
4266 } else {
4267 offset += chunk.text.len();
4268 }
4269 let style = chunk
4270 .syntax_highlight_id
4271 .zip(theme)
4272 .and_then(|(highlight, theme)| highlight.style(theme));
4273 if let Some(style) = style {
4274 let start = text.len();
4275 let end = start + chunk.text.len();
4276 highlight_ranges.push((start..end, style));
4277 }
4278 text.push_str(chunk.text);
4279 if offset >= buffer_range.end {
4280 break;
4281 }
4282 }
4283 if is_name {
4284 let after_append_len = text.len();
4285 let start = if space_added && !name_ranges.is_empty() {
4286 before_append_len - 1
4287 } else {
4288 before_append_len
4289 };
4290 name_ranges.push(start..after_append_len);
4291 }
4292 last_buffer_range_end = buffer_range.end;
4293 }
4294
4295 Some(OutlineItem {
4296 depth: 0, // We'll calculate the depth later
4297 range: item_point_range,
4298 source_range_for_text: source_range_for_text.to_point(self),
4299 text,
4300 highlight_ranges,
4301 name_ranges,
4302 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4303 annotation_range: None,
4304 })
4305 }
4306
4307 pub fn function_body_fold_ranges<T: ToOffset>(
4308 &self,
4309 within: Range<T>,
4310 ) -> impl Iterator<Item = Range<usize>> + '_ {
4311 self.text_object_ranges(within, TreeSitterOptions::default())
4312 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4313 }
4314
    /// For each grammar active in the buffer, runs the [`tree_sitter::Query`]
    /// returned by the provided callback against the given range.
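    ///
    /// A minimal sketch (not compiled as a doctest; mirrors how this module selects a
    /// per-grammar query):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```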
4317 pub fn matches(
4318 &self,
4319 range: Range<usize>,
4320 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4321 ) -> SyntaxMapMatches<'_> {
4322 self.syntax.matches(range, self, query)
4323 }
4324
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that
    /// intersect with those chunks, so it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
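    ///
    /// A minimal sketch (not compiled as a doctest; assumes a `snapshot` of this
    /// buffer type is in scope):
    ///
    /// ```ignore
    /// // First pass: the caller knows about no chunks yet.
    /// let brackets = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// // Later passes can skip the chunks that were already fetched.
    /// let known: HashSet<Range<BufferRow>> = brackets.keys().cloned().collect();
    /// let newer = snapshot.fetch_bracket_ranges(0..snapshot.len(), Some(&known));
    /// ```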
4330 pub fn fetch_bracket_ranges(
4331 &self,
4332 range: Range<usize>,
4333 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4334 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4335 let mut all_bracket_matches = HashMap::default();
4336
4337 for chunk in self
4338 .tree_sitter_data
4339 .chunks
4340 .applicable_chunks(&[range.to_point(self)])
4341 {
4342 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4343 continue;
4344 }
4345 let chunk_range = chunk.anchor_range();
4346 let chunk_range = chunk_range.to_offset(&self);
4347
4348 if let Some(cached_brackets) =
4349 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4350 {
4351 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4352 continue;
4353 }
4354
4355 let mut all_brackets = Vec::new();
4356 let mut opens = Vec::new();
4357 let mut color_pairs = Vec::new();
4358
4359 let mut matches = self.syntax.matches_with_options(
4360 chunk_range.clone(),
4361 &self.text,
4362 TreeSitterOptions {
4363 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4364 max_start_depth: None,
4365 },
4366 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4367 );
4368 let configs = matches
4369 .grammars()
4370 .iter()
4371 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4372 .collect::<Vec<_>>();
4373
4374 while let Some(mat) = matches.peek() {
4375 let mut open = None;
4376 let mut close = None;
4377 let syntax_layer_depth = mat.depth;
4378 let config = configs[mat.grammar_index];
4379 let pattern = &config.patterns[mat.pattern_index];
4380 for capture in mat.captures {
4381 if capture.index == config.open_capture_ix {
4382 open = Some(capture.node.byte_range());
4383 } else if capture.index == config.close_capture_ix {
4384 close = Some(capture.node.byte_range());
4385 }
4386 }
4387
4388 matches.advance();
4389
4390 let Some((open_range, close_range)) = open.zip(close) else {
4391 continue;
4392 };
4393
4394 let bracket_range = open_range.start..=close_range.end;
4395 if !bracket_range.overlaps(&chunk_range) {
4396 continue;
4397 }
4398
4399 let index = all_brackets.len();
4400 all_brackets.push(BracketMatch {
4401 open_range: open_range.clone(),
4402 close_range: close_range.clone(),
4403 newline_only: pattern.newline_only,
4404 syntax_layer_depth,
4405 color_index: None,
4406 });
4407
                // Certain languages have "brackets" that are not really brackets, e.g. tags, where such a
                // bracket pair matches the entire tag along with all the text inside it.
                // For now, avoid colorizing any pair in which both brackets are longer than a single char.
                // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4412 let should_color =
4413 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4414 if should_color {
4415 opens.push(open_range.clone());
4416 color_pairs.push((open_range, close_range, index));
4417 }
4418 }
4419
4420 opens.sort_by_key(|r| (r.start, r.end));
4421 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4422 color_pairs.sort_by_key(|(_, close, _)| close.end);
4423
4424 let mut open_stack = Vec::new();
4425 let mut open_index = 0;
4426 for (open, close, index) in color_pairs {
4427 while open_index < opens.len() && opens[open_index].start < close.start {
4428 open_stack.push(opens[open_index].clone());
4429 open_index += 1;
4430 }
4431
4432 if open_stack.last() == Some(&open) {
4433 let depth_index = open_stack.len() - 1;
4434 all_brackets[index].color_index = Some(depth_index);
4435 open_stack.pop();
4436 }
4437 }
4438
4439 all_brackets.sort_by_key(|bracket_match| {
4440 (bracket_match.open_range.start, bracket_match.open_range.end)
4441 });
4442
4443 if let empty_slot @ None =
4444 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4445 {
4446 *empty_slot = Some(all_brackets.clone());
4447 }
4448 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4449 }
4450
4451 all_bracket_matches
4452 }
4453
4454 pub fn all_bracket_ranges(
4455 &self,
4456 range: Range<usize>,
4457 ) -> impl Iterator<Item = BracketMatch<usize>> {
4458 self.fetch_bracket_ranges(range.clone(), None)
4459 .into_values()
4460 .flatten()
4461 .filter(move |bracket_match| {
4462 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4463 bracket_range.overlaps(&range)
4464 })
4465 }
4466
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4468 pub fn bracket_ranges<T: ToOffset>(
4469 &self,
4470 range: Range<T>,
4471 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4472 // Find bracket pairs that *inclusively* contain the given range.
4473 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4474 self.all_bracket_ranges(range)
4475 .filter(|pair| !pair.newline_only)
4476 }
4477
4478 pub fn debug_variables_query<T: ToOffset>(
4479 &self,
4480 range: Range<T>,
4481 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4482 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4483
4484 let mut matches = self.syntax.matches_with_options(
4485 range.clone(),
4486 &self.text,
4487 TreeSitterOptions::default(),
4488 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4489 );
4490
4491 let configs = matches
4492 .grammars()
4493 .iter()
4494 .map(|grammar| grammar.debug_variables_config.as_ref())
4495 .collect::<Vec<_>>();
4496
4497 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4498
4499 iter::from_fn(move || {
4500 loop {
4501 while let Some(capture) = captures.pop() {
4502 if capture.0.overlaps(&range) {
4503 return Some(capture);
4504 }
4505 }
4506
4507 let mat = matches.peek()?;
4508
4509 let Some(config) = configs[mat.grammar_index].as_ref() else {
4510 matches.advance();
4511 continue;
4512 };
4513
4514 for capture in mat.captures {
4515 let Some(ix) = config
4516 .objects_by_capture_ix
4517 .binary_search_by_key(&capture.index, |e| e.0)
4518 .ok()
4519 else {
4520 continue;
4521 };
4522 let text_object = config.objects_by_capture_ix[ix].1;
4523 let byte_range = capture.node.byte_range();
4524
4525 let mut found = false;
4526 for (range, existing) in captures.iter_mut() {
4527 if existing == &text_object {
4528 range.start = range.start.min(byte_range.start);
4529 range.end = range.end.max(byte_range.end);
4530 found = true;
4531 break;
4532 }
4533 }
4534
4535 if !found {
4536 captures.push((byte_range, text_object));
4537 }
4538 }
4539
4540 matches.advance();
4541 }
4542 })
4543 }
4544
4545 pub fn text_object_ranges<T: ToOffset>(
4546 &self,
4547 range: Range<T>,
4548 options: TreeSitterOptions,
4549 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4550 let range =
4551 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4552
4553 let mut matches =
4554 self.syntax
4555 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4556 grammar.text_object_config.as_ref().map(|c| &c.query)
4557 });
4558
4559 let configs = matches
4560 .grammars()
4561 .iter()
4562 .map(|grammar| grammar.text_object_config.as_ref())
4563 .collect::<Vec<_>>();
4564
4565 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4566
4567 iter::from_fn(move || {
4568 loop {
4569 while let Some(capture) = captures.pop() {
4570 if capture.0.overlaps(&range) {
4571 return Some(capture);
4572 }
4573 }
4574
4575 let mat = matches.peek()?;
4576
4577 let Some(config) = configs[mat.grammar_index].as_ref() else {
4578 matches.advance();
4579 continue;
4580 };
4581
4582 for capture in mat.captures {
4583 let Some(ix) = config
4584 .text_objects_by_capture_ix
4585 .binary_search_by_key(&capture.index, |e| e.0)
4586 .ok()
4587 else {
4588 continue;
4589 };
4590 let text_object = config.text_objects_by_capture_ix[ix].1;
4591 let byte_range = capture.node.byte_range();
4592
4593 let mut found = false;
4594 for (range, existing) in captures.iter_mut() {
4595 if existing == &text_object {
4596 range.start = range.start.min(byte_range.start);
4597 range.end = range.end.max(byte_range.end);
4598 found = true;
4599 break;
4600 }
4601 }
4602
4603 if !found {
4604 captures.push((byte_range, text_object));
4605 }
4606 }
4607
4608 matches.advance();
4609 }
4610 })
4611 }
4612
    /// Returns enclosing bracket ranges containing the given range.
4614 pub fn enclosing_bracket_ranges<T: ToOffset>(
4615 &self,
4616 range: Range<T>,
4617 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4618 let range = range.start.to_offset(self)..range.end.to_offset(self);
4619
4620 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4621 let max_depth = result
4622 .iter()
4623 .map(|mat| mat.syntax_layer_depth)
4624 .max()
4625 .unwrap_or(0);
4626 result.into_iter().filter(move |pair| {
4627 pair.open_range.start <= range.start
4628 && pair.close_range.end >= range.end
4629 && pair.syntax_layer_depth == max_depth
4630 })
4631 }
4632
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
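    ///
    /// A minimal sketch (not compiled as a doctest; the offsets and the filter are hypothetical):
    ///
    /// ```ignore
    /// // Only consider pairs whose brackets are single characters.
    /// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(10usize..12usize, Some(&filter));
    /// ```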
4636 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4637 &self,
4638 range: Range<T>,
4639 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4640 ) -> Option<(Range<usize>, Range<usize>)> {
4641 let range = range.start.to_offset(self)..range.end.to_offset(self);
4642
4643 // Get the ranges of the innermost pair of brackets.
4644 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4645
4646 for pair in self.enclosing_bracket_ranges(range) {
4647 if let Some(range_filter) = range_filter
4648 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4649 {
4650 continue;
4651 }
4652
4653 let len = pair.close_range.end - pair.open_range.start;
4654
4655 if let Some((existing_open, existing_close)) = &result {
4656 let existing_len = existing_close.end - existing_open.start;
4657 if len > existing_len {
4658 continue;
4659 }
4660 }
4661
4662 result = Some((pair.open_range, pair.close_range));
4663 }
4664
4665 result
4666 }
4667
    /// Returns offset ranges for any matches of the redaction query.
4669 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4670 /// will be run on the relevant section of the buffer.
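    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // Render the bytes in `range` as obscured text.
    /// }
    /// ```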
4671 pub fn redacted_ranges<T: ToOffset>(
4672 &self,
4673 range: Range<T>,
4674 ) -> impl Iterator<Item = Range<usize>> + '_ {
4675 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4676 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4677 grammar
4678 .redactions_config
4679 .as_ref()
4680 .map(|config| &config.query)
4681 });
4682
4683 let configs = syntax_matches
4684 .grammars()
4685 .iter()
4686 .map(|grammar| grammar.redactions_config.as_ref())
4687 .collect::<Vec<_>>();
4688
4689 iter::from_fn(move || {
4690 let redacted_range = syntax_matches
4691 .peek()
4692 .and_then(|mat| {
4693 configs[mat.grammar_index].and_then(|config| {
4694 mat.captures
4695 .iter()
4696 .find(|capture| capture.index == config.redaction_capture_ix)
4697 })
4698 })
4699 .map(|mat| mat.node.byte_range());
4700 syntax_matches.advance();
4701 redacted_range
4702 })
4703 }
4704
4705 pub fn injections_intersecting_range<T: ToOffset>(
4706 &self,
4707 range: Range<T>,
4708 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4709 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4710
4711 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4712 grammar
4713 .injection_config
4714 .as_ref()
4715 .map(|config| &config.query)
4716 });
4717
4718 let configs = syntax_matches
4719 .grammars()
4720 .iter()
4721 .map(|grammar| grammar.injection_config.as_ref())
4722 .collect::<Vec<_>>();
4723
4724 iter::from_fn(move || {
4725 let ranges = syntax_matches.peek().and_then(|mat| {
4726 let config = &configs[mat.grammar_index]?;
4727 let content_capture_range = mat.captures.iter().find_map(|capture| {
4728 if capture.index == config.content_capture_ix {
4729 Some(capture.node.byte_range())
4730 } else {
4731 None
4732 }
4733 })?;
4734 let language = self.language_at(content_capture_range.start)?;
4735 Some((content_capture_range, language))
4736 });
4737 syntax_matches.advance();
4738 ranges
4739 })
4740 }
4741
4742 pub fn runnable_ranges(
4743 &self,
4744 offset_range: Range<usize>,
4745 ) -> impl Iterator<Item = RunnableRange> + '_ {
4746 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4747 grammar.runnable_config.as_ref().map(|config| &config.query)
4748 });
4749
4750 let test_configs = syntax_matches
4751 .grammars()
4752 .iter()
4753 .map(|grammar| grammar.runnable_config.as_ref())
4754 .collect::<Vec<_>>();
4755
4756 iter::from_fn(move || {
4757 loop {
4758 let mat = syntax_matches.peek()?;
4759
4760 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4761 let mut run_range = None;
4762 let full_range = mat.captures.iter().fold(
4763 Range {
4764 start: usize::MAX,
4765 end: 0,
4766 },
4767 |mut acc, next| {
4768 let byte_range = next.node.byte_range();
4769 if acc.start > byte_range.start {
4770 acc.start = byte_range.start;
4771 }
4772 if acc.end < byte_range.end {
4773 acc.end = byte_range.end;
4774 }
4775 acc
4776 },
4777 );
4778 if full_range.start > full_range.end {
4779 // We did not find a full spanning range of this match.
4780 return None;
4781 }
4782 let extra_captures: SmallVec<[_; 1]> =
4783 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4784 test_configs
4785 .extra_captures
4786 .get(capture.index as usize)
4787 .cloned()
4788 .and_then(|tag_name| match tag_name {
4789 RunnableCapture::Named(name) => {
4790 Some((capture.node.byte_range(), name))
4791 }
4792 RunnableCapture::Run => {
4793 let _ = run_range.insert(capture.node.byte_range());
4794 None
4795 }
4796 })
4797 }));
4798 let run_range = run_range?;
4799 let tags = test_configs
4800 .query
4801 .property_settings(mat.pattern_index)
4802 .iter()
4803 .filter_map(|property| {
4804 if *property.key == *"tag" {
4805 property
4806 .value
4807 .as_ref()
4808 .map(|value| RunnableTag(value.to_string().into()))
4809 } else {
4810 None
4811 }
4812 })
4813 .collect();
4814 let extra_captures = extra_captures
4815 .into_iter()
4816 .map(|(range, name)| {
4817 (
4818 name.to_string(),
4819 self.text_for_range(range).collect::<String>(),
4820 )
4821 })
4822 .collect();
4823 // All tags should have the same range.
4824 Some(RunnableRange {
4825 run_range,
4826 full_range,
4827 runnable: Runnable {
4828 tags,
4829 language: mat.language,
4830 buffer: self.remote_id(),
4831 },
4832 extra_captures,
4833 buffer_id: self.remote_id(),
4834 })
4835 });
4836
4837 syntax_matches.advance();
4838 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want to return
                // `None` from this iterator just because a match did not contain a run marker, so in that
                // case we loop around to the next match.
4841 return test_range;
4842 }
4843 }
4844 })
4845 }
4846
4847 /// Returns selections for remote peers intersecting the given range.
4848 #[allow(clippy::type_complexity)]
4849 pub fn selections_in_range(
4850 &self,
4851 range: Range<Anchor>,
4852 include_local: bool,
4853 ) -> impl Iterator<
4854 Item = (
4855 ReplicaId,
4856 bool,
4857 CursorShape,
4858 impl Iterator<Item = &Selection<Anchor>> + '_,
4859 ),
4860 > + '_ {
4861 self.remote_selections
4862 .iter()
4863 .filter(move |(replica_id, set)| {
4864 (include_local || **replica_id != self.text.replica_id())
4865 && !set.selections.is_empty()
4866 })
4867 .map(move |(replica_id, set)| {
4868 let start_ix = match set.selections.binary_search_by(|probe| {
4869 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4870 }) {
4871 Ok(ix) | Err(ix) => ix,
4872 };
4873 let end_ix = match set.selections.binary_search_by(|probe| {
4874 probe.start.cmp(&range.end, self).then(Ordering::Less)
4875 }) {
4876 Ok(ix) | Err(ix) => ix,
4877 };
4878
4879 (
4880 *replica_id,
4881 set.line_mode,
4882 set.cursor_shape,
4883 set.selections[start_ix..end_ix].iter(),
4884 )
4885 })
4886 }
4887
    /// Returns whether the buffer contains any diagnostics.
4889 pub fn has_diagnostics(&self) -> bool {
4890 !self.diagnostics.is_empty()
4891 }
4892
4893 /// Returns all the diagnostics intersecting the given range.
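    ///
    /// A minimal sketch (not compiled as a doctest; resolves the diagnostic ranges to
    /// byte offsets, mirroring how this module consumes the iterator):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```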
4894 pub fn diagnostics_in_range<'a, T, O>(
4895 &'a self,
4896 search_range: Range<T>,
4897 reversed: bool,
4898 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4899 where
4900 T: 'a + Clone + ToOffset,
4901 O: 'a + FromAnchor,
4902 {
4903 let mut iterators: Vec<_> = self
4904 .diagnostics
4905 .iter()
4906 .map(|(_, collection)| {
4907 collection
4908 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4909 .peekable()
4910 })
4911 .collect();
4912
4913 std::iter::from_fn(move || {
4914 let (next_ix, _) = iterators
4915 .iter_mut()
4916 .enumerate()
4917 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4918 .min_by(|(_, a), (_, b)| {
4919 let cmp = a
4920 .range
4921 .start
4922 .cmp(&b.range.start, self)
4923 // when range is equal, sort by diagnostic severity
4924 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4925 // and stabilize order with group_id
4926 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4927 if reversed { cmp.reverse() } else { cmp }
4928 })?;
4929 iterators[next_ix]
4930 .next()
4931 .map(
4932 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4933 diagnostic,
4934 range: FromAnchor::from_anchor(&range.start, self)
4935 ..FromAnchor::from_anchor(&range.end, self),
4936 },
4937 )
4938 })
4939 }
4940
4941 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4942 /// should be used instead.
4943 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4944 &self.diagnostics
4945 }
4946
4947 /// Returns all the diagnostic groups associated with the given
4948 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
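    ///
    /// A minimal sketch (not compiled as a doctest; field names mirror the diagnostic
    /// types used in this module):
    ///
    /// ```ignore
    /// // Collect groups from every language server.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```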
4950 pub fn diagnostic_groups(
4951 &self,
4952 language_server_id: Option<LanguageServerId>,
4953 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4954 let mut groups = Vec::new();
4955
4956 if let Some(language_server_id) = language_server_id {
4957 if let Ok(ix) = self
4958 .diagnostics
4959 .binary_search_by_key(&language_server_id, |e| e.0)
4960 {
4961 self.diagnostics[ix]
4962 .1
4963 .groups(language_server_id, &mut groups, self);
4964 }
4965 } else {
4966 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4967 diagnostics.groups(*language_server_id, &mut groups, self);
4968 }
4969 }
4970
4971 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4972 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4973 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4974 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4975 });
4976
4977 groups
4978 }
4979
4980 /// Returns an iterator over the diagnostics for the given group.
4981 pub fn diagnostic_group<O>(
4982 &self,
4983 group_id: usize,
4984 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4985 where
4986 O: FromAnchor + 'static,
4987 {
4988 self.diagnostics
4989 .iter()
4990 .flat_map(move |(_, set)| set.group(group_id, self))
4991 }
4992
4993 /// An integer version number that accounts for all updates besides
4994 /// the buffer's text itself (which is versioned via a version vector).
4995 pub fn non_text_state_update_count(&self) -> usize {
4996 self.non_text_state_update_count
4997 }
4998
4999 /// An integer version that changes when the buffer's syntax changes.
5000 pub fn syntax_update_count(&self) -> usize {
5001 self.syntax.update_count()
5002 }
5003
    /// Returns a snapshot of the underlying file.
5005 pub fn file(&self) -> Option<&Arc<dyn File>> {
5006 self.file.as_ref()
5007 }
5008
5009 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5010 if let Some(file) = self.file() {
5011 if file.path().file_name().is_none() || include_root {
5012 Some(file.full_path(cx).to_string_lossy().into_owned())
5013 } else {
5014 Some(file.path().display(file.path_style(cx)).to_string())
5015 }
5016 } else {
5017 None
5018 }
5019 }
5020
5021 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5022 let query_str = query.fuzzy_contents;
5023 if query_str.is_some_and(|query| query.is_empty()) {
5024 return BTreeMap::default();
5025 }
5026
5027 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5028 language,
5029 override_id: None,
5030 }));
5031
5032 let mut query_ix = 0;
5033 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5034 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5035
5036 let mut words = BTreeMap::default();
5037 let mut current_word_start_ix = None;
5038 let mut chunk_ix = query.range.start;
5039 for chunk in self.chunks(query.range, false) {
5040 for (i, c) in chunk.text.char_indices() {
5041 let ix = chunk_ix + i;
5042 if classifier.is_word(c) {
5043 if current_word_start_ix.is_none() {
5044 current_word_start_ix = Some(ix);
5045 }
5046
5047 if let Some(query_chars) = &query_chars
5048 && query_ix < query_len
5049 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5050 {
5051 query_ix += 1;
5052 }
5053 continue;
5054 } else if let Some(word_start) = current_word_start_ix.take()
5055 && query_ix == query_len
5056 {
5057 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5058 let mut word_text = self.text_for_range(word_start..ix).peekable();
5059 let first_char = word_text
5060 .peek()
5061 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty "words" and ones that start with a digit.
5063 if !query.skip_digits
5064 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5065 {
5066 words.insert(word_text.collect(), word_range);
5067 }
5068 }
5069 query_ix = 0;
5070 }
5071 chunk_ix += chunk.text.len();
5072 }
5073
5074 words
5075 }
5076}
5077
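/// Parameters for querying the words in a buffer range (used by `words_in_range`).
///
/// A minimal sketch (not compiled as a doctest; assumes a snapshot of this buffer
/// type is in scope):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```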
5078pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters in the fuzzy string.
5080 pub fuzzy_contents: Option<&'a str>,
5081 /// Skips words that start with a digit.
5082 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5084 pub range: Range<usize>,
5085}
5086
5087fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5088 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5089}
5090
5091fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5092 let mut result = IndentSize::spaces(0);
5093 for c in text {
5094 let kind = match c {
5095 ' ' => IndentKind::Space,
5096 '\t' => IndentKind::Tab,
5097 _ => break,
5098 };
5099 if result.len == 0 {
5100 result.kind = kind;
5101 }
5102 result.len += 1;
5103 }
5104 result
5105}
5106
5107impl Clone for BufferSnapshot {
5108 fn clone(&self) -> Self {
5109 Self {
5110 text: self.text.clone(),
5111 syntax: self.syntax.clone(),
5112 file: self.file.clone(),
5113 remote_selections: self.remote_selections.clone(),
5114 diagnostics: self.diagnostics.clone(),
5115 language: self.language.clone(),
5116 tree_sitter_data: self.tree_sitter_data.clone(),
5117 non_text_state_update_count: self.non_text_state_update_count,
5118 }
5119 }
5120}
5121
5122impl Deref for BufferSnapshot {
5123 type Target = text::BufferSnapshot;
5124
5125 fn deref(&self) -> &Self::Target {
5126 &self.text
5127 }
5128}
5129
5130unsafe impl Send for BufferChunks<'_> {}
5131
5132impl<'a> BufferChunks<'a> {
5133 pub(crate) fn new(
5134 text: &'a Rope,
5135 range: Range<usize>,
5136 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5137 diagnostics: bool,
5138 buffer_snapshot: Option<&'a BufferSnapshot>,
5139 ) -> Self {
5140 let mut highlights = None;
5141 if let Some((captures, highlight_maps)) = syntax {
5142 highlights = Some(BufferChunkHighlights {
5143 captures,
5144 next_capture: None,
5145 stack: Default::default(),
5146 highlight_maps,
5147 })
5148 }
5149
5150 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5151 let chunks = text.chunks_in_range(range.clone());
5152
5153 let mut this = BufferChunks {
5154 range,
5155 buffer_snapshot,
5156 chunks,
5157 diagnostic_endpoints,
5158 error_depth: 0,
5159 warning_depth: 0,
5160 information_depth: 0,
5161 hint_depth: 0,
5162 unnecessary_depth: 0,
5163 underline: true,
5164 highlights,
5165 };
5166 this.initialize_diagnostic_endpoints();
5167 this
5168 }
5169
    /// Seeks to the given byte range in the buffer.
5171 pub fn seek(&mut self, range: Range<usize>) {
5172 let old_range = std::mem::replace(&mut self.range, range.clone());
5173 self.chunks.set_range(self.range.clone());
5174 if let Some(highlights) = self.highlights.as_mut() {
5175 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5176 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5177 highlights
5178 .stack
5179 .retain(|(end_offset, _)| *end_offset > range.start);
5180 if let Some(capture) = &highlights.next_capture
5181 && range.start >= capture.node.start_byte()
5182 {
5183 let next_capture_end = capture.node.end_byte();
5184 if range.start < next_capture_end {
5185 highlights.stack.push((
5186 next_capture_end,
5187 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5188 ));
5189 }
5190 highlights.next_capture.take();
5191 }
5192 } else if let Some(snapshot) = self.buffer_snapshot {
5193 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5194 *highlights = BufferChunkHighlights {
5195 captures,
5196 next_capture: None,
5197 stack: Default::default(),
5198 highlight_maps,
5199 };
5200 } else {
5201 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5202 // Seeking such BufferChunks is not supported.
5203 debug_assert!(
5204 false,
5205 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5206 );
5207 }
5208
5209 highlights.captures.set_byte_range(self.range.clone());
5210 self.initialize_diagnostic_endpoints();
5211 }
5212 }
5213
5214 fn initialize_diagnostic_endpoints(&mut self) {
5215 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5216 && let Some(buffer) = self.buffer_snapshot
5217 {
5218 let mut diagnostic_endpoints = Vec::new();
5219 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5220 diagnostic_endpoints.push(DiagnosticEndpoint {
5221 offset: entry.range.start,
5222 is_start: true,
5223 severity: entry.diagnostic.severity,
5224 is_unnecessary: entry.diagnostic.is_unnecessary,
5225 underline: entry.diagnostic.underline,
5226 });
5227 diagnostic_endpoints.push(DiagnosticEndpoint {
5228 offset: entry.range.end,
5229 is_start: false,
5230 severity: entry.diagnostic.severity,
5231 is_unnecessary: entry.diagnostic.is_unnecessary,
5232 underline: entry.diagnostic.underline,
5233 });
5234 }
5235 diagnostic_endpoints
5236 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5237 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5238 self.hint_depth = 0;
5239 self.error_depth = 0;
5240 self.warning_depth = 0;
5241 self.information_depth = 0;
5242 }
5243 }
5244
5245 /// The current byte offset in the buffer.
5246 pub fn offset(&self) -> usize {
5247 self.range.start
5248 }
5249
5250 pub fn range(&self) -> Range<usize> {
5251 self.range.clone()
5252 }
5253
5254 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5255 let depth = match endpoint.severity {
5256 DiagnosticSeverity::ERROR => &mut self.error_depth,
5257 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5258 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5259 DiagnosticSeverity::HINT => &mut self.hint_depth,
5260 _ => return,
5261 };
5262 if endpoint.is_start {
5263 *depth += 1;
5264 } else {
5265 *depth -= 1;
5266 }
5267
5268 if endpoint.is_unnecessary {
5269 if endpoint.is_start {
5270 self.unnecessary_depth += 1;
5271 } else {
5272 self.unnecessary_depth -= 1;
5273 }
5274 }
5275 }
5276
5277 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5278 if self.error_depth > 0 {
5279 Some(DiagnosticSeverity::ERROR)
5280 } else if self.warning_depth > 0 {
5281 Some(DiagnosticSeverity::WARNING)
5282 } else if self.information_depth > 0 {
5283 Some(DiagnosticSeverity::INFORMATION)
5284 } else if self.hint_depth > 0 {
5285 Some(DiagnosticSeverity::HINT)
5286 } else {
5287 None
5288 }
5289 }
5290
5291 fn current_code_is_unnecessary(&self) -> bool {
5292 self.unnecessary_depth > 0
5293 }
5294}
5295
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop any syntax captures that end at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the stack, and note
            // where the next not-yet-started capture begins.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Temporarily take the endpoints so that `update_diagnostic_depths` can
        // borrow `self` mutably while we iterate over them.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            // Apply the diagnostic endpoints we have already passed and note the
            // offset of the next upcoming one.
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            // Emit text up to the next syntax or diagnostic boundary, whichever
            // comes first.
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // `unbounded_shl` evaluates to 0 when `bit_end` is 128, so the wrapping
            // subtraction still produces an all-ones mask in that case.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size in the given direction.
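    ///
    /// For example, shrinking four spaces by two spaces yields two spaces,
    /// while growing four spaces by a tab leaves the indent unchanged, because
    /// the two indent kinds differ.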
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

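    /// The number of columns this indent occupies, counting each tab as
    /// `tab_size` columns. For example, an indent of two tabs with a tab size
    /// of 4 occupies 8 columns.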
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

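/// Collects the values yielded by `values` into contiguous half-open ranges,
/// starting a new range whenever a value is not exactly one greater than the
/// end of the current range or the current range has reached `max_len`.
///
/// For example, the values `[1, 2, 3, 5, 6, 9]` with a `max_len` of 8 yield
/// the ranges `1..4`, `5..7`, and `9..10`.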
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

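/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting the word characters configured for a
/// [`LanguageScope`] and a [`CharScopeContext`].
///
/// A minimal illustration (marked `ignore` rather than run as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```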
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

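    /// Classifies `c` using `ignore_punctuation` in place of the classifier's
    /// configured setting. Alphanumeric characters and `_` are always word
    /// characters; any additional word characters configured for the current
    /// scope and context are honored next; whitespace follows; everything else
    /// is punctuation unless `ignore_punctuation` is true, in which case it is
    /// treated as a word character.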
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
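///
/// For example (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t");
/// // Two spaces before the newline, plus the trailing tab.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```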
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // If this chunk begins with an all-whitespace segment, the trailing
            // whitespace run may have started at the end of the previous chunk.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The final segment of a chunk has no trailing newline, so undo the
        // last `+ 1`.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}