1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] may be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoized result of `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
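/// Cached tree-sitter-derived data (currently bracket matches) for a buffer
/// snapshot, stored per fixed-size chunk of rows.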
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// The ID provided by the dynamic registration that produced this diagnostic.
249 pub registration_id: Option<SharedString>,
250 /// A machine-readable code that identifies this diagnostic.
251 pub code: Option<NumberOrString>,
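    /// An optional URI pointing to documentation that describes this diagnostic's code.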
252 pub code_description: Option<lsp::Uri>,
253 /// Whether this diagnostic is a hint, warning, or error.
254 pub severity: DiagnosticSeverity,
255 /// The human-readable message associated with this diagnostic.
256 pub message: String,
    /// The human-readable message, in Markdown format, if available.
258 pub markdown: Option<String>,
259 /// An id that identifies the group to which this diagnostic belongs.
260 ///
261 /// When a language server produces a diagnostic with
262 /// one or more associated diagnostics, those diagnostics are all
263 /// assigned a single group ID.
264 pub group_id: usize,
265 /// Whether this diagnostic is the primary diagnostic for its group.
266 ///
267 /// In a given group, the primary diagnostic is the top-level diagnostic
268 /// returned by the language server. The non-primary diagnostics are the
269 /// associated diagnostics.
270 pub is_primary: bool,
271 /// Whether this diagnostic is considered to originate from an analysis of
272 /// files on disk, as opposed to any unsaved buffer contents. This is a
273 /// property of a given diagnostic source, and is configured for a given
274 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
275 /// for the language server.
276 pub is_disk_based: bool,
277 /// Whether this diagnostic marks unnecessary code.
278 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
280 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
282 pub data: Option<Value>,
283 /// Whether to underline the corresponding text range in the editor.
284 pub underline: bool,
285}
286
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
288pub enum DiagnosticSourceKind {
289 Pulled,
290 Pushed,
291 Other,
292}
293
294/// An operation used to synchronize this buffer with its other replicas.
295#[derive(Clone, Debug, PartialEq)]
296pub enum Operation {
297 /// A text operation.
298 Buffer(text::Operation),
299
300 /// An update to the buffer's diagnostics.
301 UpdateDiagnostics {
302 /// The id of the language server that produced the new diagnostics.
303 server_id: LanguageServerId,
304 /// The diagnostics.
305 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 },
309
310 /// An update to the most recent selections in this buffer.
311 UpdateSelections {
312 /// The selections.
313 selections: Arc<[Selection<Anchor>]>,
314 /// The buffer's lamport timestamp.
315 lamport_timestamp: clock::Lamport,
316 /// Whether the selections are in 'line mode'.
317 line_mode: bool,
318 /// The [`CursorShape`] associated with these selections.
319 cursor_shape: CursorShape,
320 },
321
322 /// An update to the characters that should trigger autocompletion
323 /// for this buffer.
324 UpdateCompletionTriggers {
325 /// The characters that trigger autocompletion.
326 triggers: Vec<String>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// The language server ID.
330 server_id: LanguageServerId,
331 },
332
333 /// An update to the line ending type of this buffer.
334 UpdateLineEnding {
335 /// The line ending type.
336 line_ending: LineEnding,
337 /// The buffer's lamport timestamp.
338 lamport_timestamp: clock::Lamport,
339 },
340}
341
342/// An event that occurs in a buffer.
343#[derive(Clone, Debug, PartialEq)]
344pub enum BufferEvent {
345 /// The buffer was changed in a way that must be
346 /// propagated to its other replicas.
347 Operation {
348 operation: Operation,
349 is_local: bool,
350 },
351 /// The buffer was edited.
352 Edited,
353 /// The buffer's `dirty` bit changed.
354 DirtyChanged,
355 /// The buffer was saved.
356 Saved,
357 /// The buffer's file was changed on disk.
358 FileHandleChanged,
359 /// The buffer was reloaded.
360 Reloaded,
    /// The buffer needs to be reloaded.
362 ReloadNeeded,
363 /// The buffer's language was changed.
364 /// The boolean indicates whether this buffer did not have a language before, but does now.
365 LanguageChanged(bool),
366 /// The buffer's syntax trees were updated.
367 Reparsed,
368 /// The buffer's diagnostics were updated.
369 DiagnosticsUpdated,
370 /// The buffer gained or lost editing capabilities.
371 CapabilityChanged,
372}
373
374/// The file associated with a buffer.
375pub trait File: Send + Sync + Any {
376 /// Returns the [`LocalFile`] associated with this file, if the
377 /// file is local.
378 fn as_local(&self) -> Option<&dyn LocalFile>;
379
380 /// Returns whether this file is local.
381 fn is_local(&self) -> bool {
382 self.as_local().is_some()
383 }
384
385 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
386 /// only available in some states, such as modification time.
387 fn disk_state(&self) -> DiskState;
388
389 /// Returns the path of this file relative to the worktree's root directory.
390 fn path(&self) -> &Arc<RelPath>;
391
392 /// Returns the path of this file relative to the worktree's parent directory (this means it
393 /// includes the name of the worktree's root folder).
394 fn full_path(&self, cx: &App) -> PathBuf;
395
396 /// Returns the path style of this file.
397 fn path_style(&self, cx: &App) -> PathStyle;
398
399 /// Returns the last component of this handle's absolute path. If this handle refers to the root
400 /// of its worktree, then this method will return the name of the worktree itself.
401 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
402
403 /// Returns the id of the worktree to which this file belongs.
404 ///
405 /// This is needed for looking up project-specific settings.
406 fn worktree_id(&self, cx: &App) -> WorktreeId;
407
408 /// Converts this file into a protobuf message.
409 fn to_proto(&self, cx: &App) -> rpc::proto::File;
410
    /// Returns whether Zed considers this to be a private file.
412 fn is_private(&self) -> bool;
413}
414
415/// The file's storage status - whether it's stored (`Present`), and if so when it was last
416/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
417/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
418/// indicator for new files.
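///
/// A minimal sketch of how the states map to [`DiskState::mtime`] and
/// [`DiskState::exists`]:
///
/// ```ignore
/// let state = DiskState::New;
/// assert!(state.mtime().is_none()); // unsaved files have no modification time
/// assert!(!state.exists());
/// ```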
419#[derive(Copy, Clone, Debug, PartialEq)]
420pub enum DiskState {
421 /// File created in Zed that has not been saved.
422 New,
423 /// File present on the filesystem.
424 Present { mtime: MTime },
425 /// Deleted file that was previously present.
426 Deleted,
427}
428
429impl DiskState {
430 /// Returns the file's last known modification time on disk.
431 pub fn mtime(self) -> Option<MTime> {
432 match self {
433 DiskState::New => None,
434 DiskState::Present { mtime } => Some(mtime),
435 DiskState::Deleted => None,
436 }
437 }
438
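    /// Returns whether the file currently exists on disk.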
439 pub fn exists(&self) -> bool {
440 match self {
441 DiskState::New => false,
442 DiskState::Present { .. } => true,
443 DiskState::Deleted => false,
444 }
445 }
446}
447
448/// The file associated with a buffer, in the case where the file is on the local disk.
449pub trait LocalFile: File {
    /// Returns the absolute path of this file.
451 fn abs_path(&self, cx: &App) -> PathBuf;
452
453 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
454 fn load(&self, cx: &App) -> Task<Result<String>>;
455
456 /// Loads the file's contents from disk.
457 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
458}
459
460/// The auto-indent behavior associated with an editing operation.
461/// For some editing operations, each affected line of text has its
462/// indentation recomputed. For other operations, the entire block
463/// of edited text is adjusted uniformly.
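///
/// A minimal sketch of passing an autoindent mode to an edit, assuming a
/// `buffer: &mut Buffer` and a `cx: &mut Context<Buffer>` are in scope:
///
/// ```ignore
/// // Re-indent each inserted line independently.
/// buffer.edit(
///     [(0..0, "fn main() {\n    todo!()\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```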
464#[derive(Clone, Debug)]
465pub enum AutoindentMode {
466 /// Indent each line of inserted text.
467 EachLine,
468 /// Apply the same indentation adjustment to all of the lines
469 /// in a given insertion.
470 Block {
471 /// The original indentation column of the first line of each
472 /// insertion, if it has been copied.
473 ///
474 /// Knowing this makes it possible to preserve the relative indentation
475 /// of every line in the insertion from when it was copied.
476 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation shifted by `b - a`.
480 original_indent_columns: Vec<Option<u32>>,
481 },
482}
483
484#[derive(Clone)]
485struct AutoindentRequest {
486 before_edit: BufferSnapshot,
487 entries: Vec<AutoindentRequestEntry>,
488 is_block_mode: bool,
489 ignore_empty_lines: bool,
490}
491
492#[derive(Debug, Clone)]
493struct AutoindentRequestEntry {
494 /// A range of the buffer whose indentation should be adjusted.
495 range: Range<Anchor>,
496 /// Whether or not these lines should be considered brand new, for the
497 /// purpose of auto-indent. When text is not new, its indentation will
498 /// only be adjusted if the suggested indentation level has *changed*
499 /// since the edit was made.
500 first_line_is_new: bool,
501 indent_size: IndentSize,
502 original_indent_column: Option<u32>,
503}
504
505#[derive(Debug)]
506struct IndentSuggestion {
507 basis_row: u32,
508 delta: Ordering,
509 within_error: bool,
510}
511
512struct BufferChunkHighlights<'a> {
513 captures: SyntaxMapCaptures<'a>,
514 next_capture: Option<SyntaxMapCapture<'a>>,
515 stack: Vec<(usize, HighlightId)>,
516 highlight_maps: Vec<HighlightMap>,
517}
518
519/// An iterator that yields chunks of a buffer's text, along with their
520/// syntax highlights and diagnostic status.
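///
/// A minimal sketch of iterating highlighted chunks, assuming a
/// `snapshot: &BufferSnapshot` is in scope:
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     println!("{:?} -> {:?}", chunk.text, chunk.syntax_highlight_id);
/// }
/// ```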
521pub struct BufferChunks<'a> {
522 buffer_snapshot: Option<&'a BufferSnapshot>,
523 range: Range<usize>,
524 chunks: text::Chunks<'a>,
525 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
526 error_depth: usize,
527 warning_depth: usize,
528 information_depth: usize,
529 hint_depth: usize,
530 unnecessary_depth: usize,
531 underline: bool,
532 highlights: Option<BufferChunkHighlights<'a>>,
533}
534
535/// A chunk of a buffer's text, along with its syntax highlight and
536/// diagnostic status.
537#[derive(Clone, Debug, Default)]
538pub struct Chunk<'a> {
539 /// The text of the chunk.
540 pub text: &'a str,
541 /// The syntax highlighting style of the chunk.
542 pub syntax_highlight_id: Option<HighlightId>,
543 /// The highlight style that has been applied to this chunk in
544 /// the editor.
545 pub highlight_style: Option<HighlightStyle>,
546 /// The severity of diagnostic associated with this chunk, if any.
547 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset marking which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
551 pub chars: u128,
552 /// Whether this chunk of text is marked as unnecessary.
553 pub is_unnecessary: bool,
554 /// Whether this chunk of text was originally a tab character.
555 pub is_tab: bool,
556 /// Whether this chunk of text was originally an inlay.
557 pub is_inlay: bool,
558 /// Whether to underline the corresponding text range in the editor.
559 pub underline: bool,
560}
561
562/// A set of edits to a given version of a buffer, computed asynchronously.
563#[derive(Debug)]
564pub struct Diff {
565 pub base_version: clock::Global,
566 pub line_ending: LineEnding,
567 pub edits: Vec<(Range<usize>, Arc<str>)>,
568}
569
570#[derive(Debug, Clone, Copy)]
571pub(crate) struct DiagnosticEndpoint {
572 offset: usize,
573 is_start: bool,
574 underline: bool,
575 severity: DiagnosticSeverity,
576 is_unnecessary: bool,
577}
578
579/// A class of characters, used for characterizing a run of text.
580#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
581pub enum CharKind {
582 /// Whitespace.
583 Whitespace,
584 /// Punctuation.
585 Punctuation,
586 /// Word.
587 Word,
588}
589
590/// Context for character classification within a specific scope.
591#[derive(Copy, Clone, Eq, PartialEq, Debug)]
592pub enum CharScopeContext {
593 /// Character classification for completion queries.
594 ///
595 /// This context treats certain characters as word constituents that would
596 /// normally be considered punctuation, such as '-' in Tailwind classes
597 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
598 Completion,
599 /// Character classification for linked edits.
600 ///
601 /// This context handles characters that should be treated as part of
602 /// identifiers during linked editing operations, such as '.' in JSX
603 /// component names like `<Animated.View>`.
604 LinkedEdit,
605}
606
/// A runnable is a set of data about a buffer region that can be resolved into a task.
608pub struct Runnable {
609 pub tags: SmallVec<[RunnableTag; 1]>,
610 pub language: Arc<Language>,
611 pub buffer: BufferId,
612}
613
614#[derive(Default, Clone, Debug)]
615pub struct HighlightedText {
616 pub text: SharedString,
617 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
618}
619
620#[derive(Default, Debug)]
621struct HighlightedTextBuilder {
622 pub text: String,
623 highlights: Vec<(Range<usize>, HighlightStyle)>,
624}
625
626impl HighlightedText {
627 pub fn from_buffer_range<T: ToOffset>(
628 range: Range<T>,
629 snapshot: &text::BufferSnapshot,
630 syntax_snapshot: &SyntaxSnapshot,
631 override_style: Option<HighlightStyle>,
632 syntax_theme: &SyntaxTheme,
633 ) -> Self {
634 let mut highlighted_text = HighlightedTextBuilder::default();
635 highlighted_text.add_text_from_buffer_range(
636 range,
637 snapshot,
638 syntax_snapshot,
639 override_style,
640 syntax_theme,
641 );
642 highlighted_text.build()
643 }
644
645 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
646 gpui::StyledText::new(self.text.clone())
647 .with_default_highlights(default_style, self.highlights.iter().cloned())
648 }
649
    /// Returns the first line with its leading whitespace trimmed (unless a
    /// highlight begins within that whitespace), along with a boolean
    /// indicating whether more lines follow.
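    ///
    /// A minimal sketch of the trimming behavior when no highlights are present:
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(preview.text.as_ref(), "let x = 1;");
    /// assert!(has_more);
    /// ```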
652 pub fn first_line_preview(self) -> (Self, bool) {
653 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
654 let first_line = &self.text[..newline_ix];
655
        // Trim leading whitespace, unless a highlight starts within it.
657 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
658 if let Some((first_highlight_range, _)) = self.highlights.first() {
659 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
660 }
661
662 let preview_text = &first_line[preview_start_ix..];
663 let preview_highlights = self
664 .highlights
665 .into_iter()
666 .skip_while(|(range, _)| range.end <= preview_start_ix)
667 .take_while(|(range, _)| range.start < newline_ix)
668 .filter_map(|(mut range, highlight)| {
669 range.start = range.start.saturating_sub(preview_start_ix);
670 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
671 if range.is_empty() {
672 None
673 } else {
674 Some((range, highlight))
675 }
676 });
677
678 let preview = Self {
679 text: SharedString::new(preview_text),
680 highlights: preview_highlights.collect(),
681 };
682
683 (preview, self.text.len() > newline_ix)
684 }
685}
686
687impl HighlightedTextBuilder {
688 pub fn build(self) -> HighlightedText {
689 HighlightedText {
690 text: self.text.into(),
691 highlights: self.highlights,
692 }
693 }
694
695 pub fn add_text_from_buffer_range<T: ToOffset>(
696 &mut self,
697 range: Range<T>,
698 snapshot: &text::BufferSnapshot,
699 syntax_snapshot: &SyntaxSnapshot,
700 override_style: Option<HighlightStyle>,
701 syntax_theme: &SyntaxTheme,
702 ) {
703 let range = range.to_offset(snapshot);
704 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
705 let start = self.text.len();
706 self.text.push_str(chunk.text);
707 let end = self.text.len();
708
709 if let Some(highlight_style) = chunk
710 .syntax_highlight_id
711 .and_then(|id| id.style(syntax_theme))
712 {
713 let highlight_style = override_style.map_or(highlight_style, |override_style| {
714 highlight_style.highlight(override_style)
715 });
716 self.highlights.push((start..end, highlight_style));
717 } else if let Some(override_style) = override_style {
718 self.highlights.push((start..end, override_style));
719 }
720 }
721 }
722
723 fn highlighted_chunks<'a>(
724 range: Range<usize>,
725 snapshot: &'a text::BufferSnapshot,
726 syntax_snapshot: &'a SyntaxSnapshot,
727 ) -> BufferChunks<'a> {
728 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
729 grammar
730 .highlights_config
731 .as_ref()
732 .map(|config| &config.query)
733 });
734
735 let highlight_maps = captures
736 .grammars()
737 .iter()
738 .map(|grammar| grammar.highlight_map())
739 .collect();
740
741 BufferChunks::new(
742 snapshot.as_rope(),
743 range,
744 Some((captures, highlight_maps)),
745 false,
746 None,
747 )
748 }
749}
750
751#[derive(Clone)]
752pub struct EditPreview {
753 old_snapshot: text::BufferSnapshot,
754 applied_edits_snapshot: text::BufferSnapshot,
755 syntax_snapshot: SyntaxSnapshot,
756}
757
758impl EditPreview {
759 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first.start.to_point(&self.old_snapshot);
764 let old_end = last.end.to_point(&self.old_snapshot);
765 let new_end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 let start = Point::new(start.row.saturating_sub(3), 0);
771 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
772 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
773
774 Some(unified_diff(
775 &self
776 .old_snapshot
777 .text_for_range(start..old_end)
778 .collect::<String>(),
779 &self
780 .applied_edits_snapshot
781 .text_for_range(start..new_end)
782 .collect::<String>(),
783 ))
784 }
785
786 pub fn highlight_edits(
787 &self,
788 current_snapshot: &BufferSnapshot,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 include_deletions: bool,
791 cx: &App,
792 ) -> HighlightedText {
793 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
794 return HighlightedText::default();
795 };
796
797 let mut highlighted_text = HighlightedTextBuilder::default();
798
799 let visible_range_in_preview_snapshot =
800 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
801 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
802
803 let insertion_highlight_style = HighlightStyle {
804 background_color: Some(cx.theme().status().created_background),
805 ..Default::default()
806 };
807 let deletion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().deleted_background),
809 ..Default::default()
810 };
811 let syntax_theme = cx.theme().syntax();
812
813 for (range, edit_text) in edits {
814 let edit_new_end_in_preview_snapshot = range
815 .end
816 .bias_right(&self.old_snapshot)
817 .to_offset(&self.applied_edits_snapshot);
818 let edit_start_in_preview_snapshot =
819 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
820
821 let unchanged_range_in_preview_snapshot =
822 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
823 if !unchanged_range_in_preview_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 unchanged_range_in_preview_snapshot,
826 &self.applied_edits_snapshot,
827 &self.syntax_snapshot,
828 None,
829 syntax_theme,
830 );
831 }
832
833 let range_in_current_snapshot = range.to_offset(current_snapshot);
834 if include_deletions && !range_in_current_snapshot.is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
839 Some(deletion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 if !edit_text.as_ref().is_empty() {
845 highlighted_text.add_text_from_buffer_range(
846 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
847 &self.applied_edits_snapshot,
848 &self.syntax_snapshot,
849 Some(insertion_highlight_style),
850 syntax_theme,
851 );
852 }
853
854 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
855 }
856
857 highlighted_text.add_text_from_buffer_range(
858 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
859 &self.applied_edits_snapshot,
860 &self.syntax_snapshot,
861 None,
862 syntax_theme,
863 );
864
865 highlighted_text.build()
866 }
867
868 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
869 cx.new(|cx| {
870 let mut buffer = Buffer::local_normalized(
871 self.applied_edits_snapshot.as_rope().clone(),
872 self.applied_edits_snapshot.line_ending(),
873 cx,
874 );
875 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
876 buffer
877 })
878 }
879
880 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
881 let (first, _) = edits.first()?;
882 let (last, _) = edits.last()?;
883
884 let start = first
885 .start
886 .bias_left(&self.old_snapshot)
887 .to_point(&self.applied_edits_snapshot);
888 let end = last
889 .end
890 .bias_right(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892
893 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
894 let range = Point::new(start.row, 0)
895 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
896
897 Some(range)
898 }
899}
900
901#[derive(Clone, Debug, PartialEq, Eq)]
902pub struct BracketMatch<T> {
903 pub open_range: Range<T>,
904 pub close_range: Range<T>,
905 pub newline_only: bool,
906 pub syntax_layer_depth: usize,
907 pub color_index: Option<usize>,
908}
909
910impl<T> BracketMatch<T> {
911 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
912 (self.open_range, self.close_range)
913 }
914}
915
916impl Buffer {
917 /// Create a new buffer with the given base text.
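    ///
    /// A minimal sketch, assuming a `cx: &mut App` is available:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```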
918 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
919 Self::build(
920 TextBuffer::new(
921 ReplicaId::LOCAL,
922 cx.entity_id().as_non_zero_u64().into(),
923 base_text.into(),
924 ),
925 None,
926 Capability::ReadWrite,
927 )
928 }
929
930 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
931 pub fn local_normalized(
932 base_text_normalized: Rope,
933 line_ending: LineEnding,
934 cx: &Context<Self>,
935 ) -> Self {
936 Self::build(
937 TextBuffer::new_normalized(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 line_ending,
941 base_text_normalized,
942 ),
943 None,
944 Capability::ReadWrite,
945 )
946 }
947
948 /// Create a new buffer that is a replica of a remote buffer.
949 pub fn remote(
950 remote_id: BufferId,
951 replica_id: ReplicaId,
952 capability: Capability,
953 base_text: impl Into<String>,
954 ) -> Self {
955 Self::build(
956 TextBuffer::new(replica_id, remote_id, base_text.into()),
957 None,
958 capability,
959 )
960 }
961
962 /// Create a new buffer that is a replica of a remote buffer, populating its
963 /// state from the given protobuf message.
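    ///
    /// A minimal sketch, assuming `replica_id` and `message` were received from
    /// a collaborator over RPC:
    ///
    /// ```ignore
    /// let buffer = Buffer::from_proto(replica_id, Capability::ReadOnly, message, None)?;
    /// ```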
964 pub fn from_proto(
965 replica_id: ReplicaId,
966 capability: Capability,
967 message: proto::BufferState,
968 file: Option<Arc<dyn File>>,
969 ) -> Result<Self> {
970 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
971 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
972 let mut this = Self::build(buffer, file, capability);
973 this.text.set_line_ending(proto::deserialize_line_ending(
974 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
975 ));
976 this.saved_version = proto::deserialize_version(&message.saved_version);
977 this.saved_mtime = message.saved_mtime.map(|time| time.into());
978 Ok(this)
979 }
980
981 /// Serialize the buffer's state to a protobuf message.
982 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
983 proto::BufferState {
984 id: self.remote_id().into(),
985 file: self.file.as_ref().map(|f| f.to_proto(cx)),
986 base_text: self.base_text().to_string(),
987 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
988 saved_version: proto::serialize_version(&self.saved_version),
989 saved_mtime: self.saved_mtime.map(|time| time.into()),
990 }
991 }
992
    /// Serialize all of the changes made to the buffer since the given version as protobuf operations.
994 pub fn serialize_ops(
995 &self,
996 since: Option<clock::Global>,
997 cx: &App,
998 ) -> Task<Vec<proto::Operation>> {
999 let mut operations = Vec::new();
1000 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1001
1002 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1003 proto::serialize_operation(&Operation::UpdateSelections {
1004 selections: set.selections.clone(),
1005 lamport_timestamp: set.lamport_timestamp,
1006 line_mode: set.line_mode,
1007 cursor_shape: set.cursor_shape,
1008 })
1009 }));
1010
1011 for (server_id, diagnostics) in &self.diagnostics {
1012 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1013 lamport_timestamp: self.diagnostics_timestamp,
1014 server_id: *server_id,
1015 diagnostics: diagnostics.iter().cloned().collect(),
1016 }));
1017 }
1018
1019 for (server_id, completions) in &self.completion_triggers_per_language_server {
1020 operations.push(proto::serialize_operation(
1021 &Operation::UpdateCompletionTriggers {
1022 triggers: completions.iter().cloned().collect(),
1023 lamport_timestamp: self.completion_triggers_timestamp,
1024 server_id: *server_id,
1025 },
1026 ));
1027 }
1028
1029 let text_operations = self.text.operations().clone();
1030 cx.background_spawn(async move {
1031 let since = since.unwrap_or_default();
1032 operations.extend(
1033 text_operations
1034 .iter()
1035 .filter(|(_, op)| !since.observed(op.timestamp()))
1036 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1037 );
1038 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1039 operations
1040 })
1041 }
1042
1043 /// Assign a language to the buffer, returning the buffer.
1044 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1045 self.set_language_async(Some(language), cx);
1046 self
1047 }
1048
1049 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1050 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1051 self.set_language(Some(language), cx);
1052 self
1053 }
1054
1055 /// Returns the [`Capability`] of this buffer.
1056 pub fn capability(&self) -> Capability {
1057 self.capability
1058 }
1059
1060 /// Whether this buffer can only be read.
1061 pub fn read_only(&self) -> bool {
1062 self.capability == Capability::ReadOnly
1063 }
1064
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1066 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1067 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1068 let snapshot = buffer.snapshot();
1069 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1070 let tree_sitter_data = TreeSitterData::new(snapshot);
1071 Self {
1072 saved_mtime,
1073 tree_sitter_data: Arc::new(tree_sitter_data),
1074 saved_version: buffer.version(),
1075 preview_version: buffer.version(),
1076 reload_task: None,
1077 transaction_depth: 0,
1078 was_dirty_before_starting_transaction: None,
1079 has_unsaved_edits: Cell::new((buffer.version(), false)),
1080 text: buffer,
1081 branch_state: None,
1082 file,
1083 capability,
1084 syntax_map,
1085 reparse: None,
1086 non_text_state_update_count: 0,
1087 sync_parse_timeout: Duration::from_millis(1),
1088 parse_status: watch::channel(ParseStatus::Idle),
1089 autoindent_requests: Default::default(),
1090 wait_for_autoindent_txs: Default::default(),
1091 pending_autoindent: Default::default(),
1092 language: None,
1093 remote_selections: Default::default(),
1094 diagnostics: Default::default(),
1095 diagnostics_timestamp: Lamport::MIN,
1096 completion_triggers: Default::default(),
1097 completion_triggers_per_language_server: Default::default(),
1098 completion_triggers_timestamp: Lamport::MIN,
1099 deferred_ops: OperationQueue::new(),
1100 has_conflict: false,
1101 change_bits: Default::default(),
1102 _subscriptions: Vec::new(),
1103 }
1104 }
1105
1106 pub fn build_snapshot(
1107 text: Rope,
1108 language: Option<Arc<Language>>,
1109 language_registry: Option<Arc<LanguageRegistry>>,
1110 cx: &mut App,
1111 ) -> impl Future<Output = BufferSnapshot> + use<> {
1112 let entity_id = cx.reserve_entity::<Self>().entity_id();
1113 let buffer_id = entity_id.as_non_zero_u64().into();
1114 async move {
1115 let text =
1116 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1117 .snapshot();
1118 let mut syntax = SyntaxMap::new(&text).snapshot();
1119 if let Some(language) = language.clone() {
1120 let language_registry = language_registry.clone();
1121 syntax.reparse(&text, language_registry, language);
1122 }
1123 let tree_sitter_data = TreeSitterData::new(text.clone());
1124 BufferSnapshot {
1125 text,
1126 syntax,
1127 file: None,
1128 diagnostics: Default::default(),
1129 remote_selections: Default::default(),
1130 tree_sitter_data: Arc::new(tree_sitter_data),
1131 language,
1132 non_text_state_update_count: 0,
1133 }
1134 }
1135 }
1136
1137 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1138 let entity_id = cx.reserve_entity::<Self>().entity_id();
1139 let buffer_id = entity_id.as_non_zero_u64().into();
1140 let text = TextBuffer::new_normalized(
1141 ReplicaId::LOCAL,
1142 buffer_id,
1143 Default::default(),
1144 Rope::new(),
1145 )
1146 .snapshot();
1147 let syntax = SyntaxMap::new(&text).snapshot();
1148 let tree_sitter_data = TreeSitterData::new(text.clone());
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(tree_sitter_data),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language: None,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 #[cfg(any(test, feature = "test-support"))]
1162 pub fn build_snapshot_sync(
1163 text: Rope,
1164 language: Option<Arc<Language>>,
1165 language_registry: Option<Arc<LanguageRegistry>>,
1166 cx: &mut App,
1167 ) -> BufferSnapshot {
1168 let entity_id = cx.reserve_entity::<Self>().entity_id();
1169 let buffer_id = entity_id.as_non_zero_u64().into();
1170 let text =
1171 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1172 .snapshot();
1173 let mut syntax = SyntaxMap::new(&text).snapshot();
1174 if let Some(language) = language.clone() {
1175 syntax.reparse(&text, language_registry, language);
1176 }
1177 let tree_sitter_data = TreeSitterData::new(text.clone());
1178 BufferSnapshot {
1179 text,
1180 syntax,
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 file: None,
1183 diagnostics: Default::default(),
1184 remote_selections: Default::default(),
1185 language,
1186 non_text_state_update_count: 0,
1187 }
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's current state. This is computationally
1191 /// cheap, and allows reading from the buffer on a background thread.
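    ///
    /// A minimal sketch of reading from a snapshot off the main thread, assuming
    /// a `buffer: &Buffer` and a `cx: &App` are in scope:
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let _line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```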
1192 pub fn snapshot(&self) -> BufferSnapshot {
1193 let text = self.text.snapshot();
1194 let mut syntax_map = self.syntax_map.lock();
1195 syntax_map.interpolate(&text);
1196 let syntax = syntax_map.snapshot();
1197
1198 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1199 Arc::new(TreeSitterData::new(text.clone()))
1200 } else {
1201 self.tree_sitter_data.clone()
1202 };
1203
1204 BufferSnapshot {
1205 text,
1206 syntax,
1207 tree_sitter_data,
1208 file: self.file.clone(),
1209 remote_selections: self.remote_selections.clone(),
1210 diagnostics: self.diagnostics.clone(),
1211 language: self.language.clone(),
1212 non_text_state_update_count: self.non_text_state_update_count,
1213 }
1214 }
1215
1216 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1217 let this = cx.entity();
1218 cx.new(|cx| {
1219 let mut branch = Self {
1220 branch_state: Some(BufferBranchState {
1221 base_buffer: this.clone(),
1222 merged_operations: Default::default(),
1223 }),
1224 language: self.language.clone(),
1225 has_conflict: self.has_conflict,
1226 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1227 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1228 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1229 };
1230 if let Some(language_registry) = self.language_registry() {
1231 branch.set_language_registry(language_registry);
1232 }
1233
1234 // Reparse the branch buffer so that we get syntax highlighting immediately.
1235 branch.reparse(cx, true);
1236
1237 branch
1238 })
1239 }
1240
1241 pub fn preview_edits(
1242 &self,
1243 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1244 cx: &App,
1245 ) -> Task<EditPreview> {
1246 let registry = self.language_registry();
1247 let language = self.language().cloned();
1248 let old_snapshot = self.text.snapshot();
1249 let mut branch_buffer = self.text.branch();
1250 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1251 cx.background_spawn(async move {
1252 if !edits.is_empty() {
1253 if let Some(language) = language.clone() {
1254 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1255 }
1256
1257 branch_buffer.edit(edits.iter().cloned());
1258 let snapshot = branch_buffer.snapshot();
1259 syntax_snapshot.interpolate(&snapshot);
1260
1261 if let Some(language) = language {
1262 syntax_snapshot.reparse(&snapshot, registry, language);
1263 }
1264 }
1265 EditPreview {
1266 old_snapshot,
1267 applied_edits_snapshot: branch_buffer.snapshot(),
1268 syntax_snapshot,
1269 }
1270 })
1271 }
1272
1273 /// Applies all of the changes in this buffer that intersect any of the
1274 /// given `ranges` to its base buffer.
1275 ///
1276 /// If `ranges` is empty, then all changes will be applied. This buffer must
1277 /// be a branch buffer to call this method.
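    ///
    /// A minimal sketch, assuming `branch` is an `Entity<Buffer>` created via
    /// [`Buffer::branch`] and a `cx: &mut App` is in scope:
    ///
    /// ```ignore
    /// // Merge every change from the branch back into its base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```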
1278 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1279 let Some(base_buffer) = self.base_buffer() else {
1280 debug_panic!("not a branch buffer");
1281 return;
1282 };
1283
1284 let mut ranges = if ranges.is_empty() {
1285 &[0..usize::MAX]
1286 } else {
1287 ranges.as_slice()
1288 }
1289 .iter()
1290 .peekable();
1291
1292 let mut edits = Vec::new();
1293 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1294 let mut is_included = false;
1295 while let Some(range) = ranges.peek() {
1296 if range.end < edit.new.start {
1297 ranges.next().unwrap();
1298 } else {
1299 if range.start <= edit.new.end {
1300 is_included = true;
1301 }
1302 break;
1303 }
1304 }
1305
1306 if is_included {
1307 edits.push((
1308 edit.old.clone(),
1309 self.text_for_range(edit.new.clone()).collect::<String>(),
1310 ));
1311 }
1312 }
1313
1314 let operation = base_buffer.update(cx, |base_buffer, cx| {
1315 // cx.emit(BufferEvent::DiffBaseChanged);
1316 base_buffer.edit(edits, None, cx)
1317 });
1318
1319 if let Some(operation) = operation
1320 && let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 {
1324 merged_operations.push(operation);
1325 }
1326 }
1327
1328 fn on_base_buffer_event(
1329 &mut self,
1330 _: Entity<Buffer>,
1331 event: &BufferEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 let BufferEvent::Operation { operation, .. } = event else {
1335 return;
1336 };
1337 let Some(BufferBranchState {
1338 merged_operations, ..
1339 }) = &mut self.branch_state
1340 else {
1341 return;
1342 };
1343
1344 let mut operation_to_undo = None;
1345 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1346 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1347 {
1348 merged_operations.remove(ix);
1349 operation_to_undo = Some(operation.timestamp);
1350 }
1351
1352 self.apply_ops([operation.clone()], cx);
1353
1354 if let Some(timestamp) = operation_to_undo {
1355 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1356 self.undo_operations(counts, cx);
1357 }
1358 }
1359
1360 #[cfg(test)]
1361 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1362 &self.text
1363 }
1364
1365 /// Retrieve a snapshot of the buffer's raw text, without any
1366 /// language-related state like the syntax tree or diagnostics.
1367 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1368 self.text.snapshot()
1369 }
1370
1371 /// The file associated with the buffer, if any.
1372 pub fn file(&self) -> Option<&Arc<dyn File>> {
1373 self.file.as_ref()
1374 }
1375
1376 /// The version of the buffer that was last saved or reloaded from disk.
1377 pub fn saved_version(&self) -> &clock::Global {
1378 &self.saved_version
1379 }
1380
1381 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1382 pub fn saved_mtime(&self) -> Option<MTime> {
1383 self.saved_mtime
1384 }
1385
1386 /// Assign a language to the buffer.
1387 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1388 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1389 }
1390
1391 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1392 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1393 self.set_language_(language, true, cx);
1394 }
1395
1396 fn set_language_(
1397 &mut self,
1398 language: Option<Arc<Language>>,
1399 may_block: bool,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.non_text_state_update_count += 1;
1403 self.syntax_map.lock().clear(&self.text);
1404 let old_language = std::mem::replace(&mut self.language, language);
1405 self.was_changed();
1406 self.reparse(cx, may_block);
1407 let has_fresh_language =
1408 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1409 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1410 }
1411
1412 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1413 /// other languages if parts of the buffer are written in different languages.
1414 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1415 self.syntax_map
1416 .lock()
1417 .set_language_registry(language_registry);
1418 }
1419
1420 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1421 self.syntax_map.lock().language_registry()
1422 }
1423
1424 /// Assign the line ending type to the buffer.
1425 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1426 self.text.set_line_ending(line_ending);
1427
1428 let lamport_timestamp = self.text.lamport_clock.tick();
1429 self.send_operation(
1430 Operation::UpdateLineEnding {
1431 line_ending,
1432 lamport_timestamp,
1433 },
1434 true,
1435 cx,
1436 );
1437 }
1438
1439 /// Assign the buffer a new [`Capability`].
1440 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1441 if self.capability != capability {
1442 self.capability = capability;
1443 cx.emit(BufferEvent::CapabilityChanged)
1444 }
1445 }
1446
1447 /// This method is called to signal that the buffer has been saved.
1448 pub fn did_save(
1449 &mut self,
1450 version: clock::Global,
1451 mtime: Option<MTime>,
1452 cx: &mut Context<Self>,
1453 ) {
1454 self.saved_version = version.clone();
1455 self.has_unsaved_edits.set((version, false));
1456 self.has_conflict = false;
1457 self.saved_mtime = mtime;
1458 self.was_changed();
1459 cx.emit(BufferEvent::Saved);
1460 cx.notify();
1461 }
1462
1463 /// Reloads the contents of the buffer from disk.
1464 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1465 let (tx, rx) = futures::channel::oneshot::channel();
1466 let prev_version = self.text.version();
1467 self.reload_task = Some(cx.spawn(async move |this, cx| {
1468 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1469 let file = this.file.as_ref()?.as_local()?;
1470
1471 Some((file.disk_state().mtime(), file.load(cx)))
1472 })?
1473 else {
1474 return Ok(());
1475 };
1476
1477 let new_text = new_text.await?;
1478 let diff = this
1479 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1480 .await;
1481 this.update(cx, |this, cx| {
1482 if this.version() == diff.base_version {
1483 this.finalize_last_transaction();
1484 this.apply_diff(diff, cx);
1485 tx.send(this.finalize_last_transaction().cloned()).ok();
1486 this.has_conflict = false;
1487 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1488 } else {
1489 if !diff.edits.is_empty()
1490 || this
1491 .edits_since::<usize>(&diff.base_version)
1492 .next()
1493 .is_some()
1494 {
1495 this.has_conflict = true;
1496 }
1497
1498 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1499 }
1500
1501 this.reload_task.take();
1502 })
1503 }));
1504 rx
1505 }
1506
1507 /// This method is called to signal that the buffer has been reloaded.
1508 pub fn did_reload(
1509 &mut self,
1510 version: clock::Global,
1511 line_ending: LineEnding,
1512 mtime: Option<MTime>,
1513 cx: &mut Context<Self>,
1514 ) {
1515 self.saved_version = version;
1516 self.has_unsaved_edits
1517 .set((self.saved_version.clone(), false));
1518 self.text.set_line_ending(line_ending);
1519 self.saved_mtime = mtime;
1520 cx.emit(BufferEvent::Reloaded);
1521 cx.notify();
1522 }
1523
1524 /// Updates the [`File`] backing this buffer. This should be called when
1525 /// the file has changed or has been deleted.
1526 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1527 let was_dirty = self.is_dirty();
1528 let mut file_changed = false;
1529
1530 if let Some(old_file) = self.file.as_ref() {
1531 if new_file.path() != old_file.path() {
1532 file_changed = true;
1533 }
1534
1535 let old_state = old_file.disk_state();
1536 let new_state = new_file.disk_state();
1537 if old_state != new_state {
1538 file_changed = true;
1539 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1540 cx.emit(BufferEvent::ReloadNeeded)
1541 }
1542 }
1543 } else {
1544 file_changed = true;
1545 };
1546
1547 self.file = Some(new_file);
1548 if file_changed {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 if was_dirty != self.is_dirty() {
1552 cx.emit(BufferEvent::DirtyChanged);
1553 }
1554 cx.emit(BufferEvent::FileHandleChanged);
1555 cx.notify();
1556 }
1557 }
1558
1559 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1560 Some(self.branch_state.as_ref()?.base_buffer.clone())
1561 }
1562
1563 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1564 pub fn language(&self) -> Option<&Arc<Language>> {
1565 self.language.as_ref()
1566 }
1567
1568 /// Returns the [`Language`] at the given location.
1569 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1570 let offset = position.to_offset(self);
1571 let mut is_first = true;
1572 let start_anchor = self.anchor_before(offset);
1573 let end_anchor = self.anchor_after(offset);
1574 self.syntax_map
1575 .lock()
1576 .layers_for_range(offset..offset, &self.text, false)
1577 .filter(|layer| {
1578 if is_first {
1579 is_first = false;
1580 return true;
1581 }
1582
1583 layer
1584 .included_sub_ranges
1585 .map(|sub_ranges| {
1586 sub_ranges.iter().any(|sub_range| {
1587 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1588 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1589 !is_before_start && !is_after_end
1590 })
1591 })
1592 .unwrap_or(true)
1593 })
1594 .last()
1595 .map(|info| info.language.clone())
1596 .or_else(|| self.language.clone())
1597 }
1598
1599 /// Returns each [`Language`] for the active syntax layers at the given location.
1600 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1601 let offset = position.to_offset(self);
1602 let mut languages: Vec<Arc<Language>> = self
1603 .syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .map(|info| info.language.clone())
1607 .collect();
1608
1609 if languages.is_empty()
1610 && let Some(buffer_language) = self.language()
1611 {
1612 languages.push(buffer_language.clone());
1613 }
1614
1615 languages
1616 }
1617
1618 /// An integer version number that accounts for all updates besides
1619 /// the buffer's text itself (which is versioned via a version vector).
1620 pub fn non_text_state_update_count(&self) -> usize {
1621 self.non_text_state_update_count
1622 }
1623
1624 /// Whether the buffer is being parsed in the background.
1625 #[cfg(any(test, feature = "test-support"))]
1626 pub fn is_parsing(&self) -> bool {
1627 self.reparse.is_some()
1628 }
1629
1630 /// Indicates whether the buffer contains any regions that may be
1631 /// written in a language that hasn't been loaded yet.
1632 pub fn contains_unknown_injections(&self) -> bool {
1633 self.syntax_map.lock().contains_unknown_injections()
1634 }
1635
1636 #[cfg(any(test, feature = "test-support"))]
1637 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1638 self.sync_parse_timeout = timeout;
1639 }
1640
1641 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1642 match Arc::get_mut(&mut self.tree_sitter_data) {
1643 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1644 None => {
1645 let tree_sitter_data = TreeSitterData::new(snapshot);
1646 self.tree_sitter_data = Arc::new(tree_sitter_data)
1647 }
1648 }
1649 }
1650
1651 /// Called after an edit to synchronize the buffer's main parse tree with
1652 /// the buffer's new underlying state.
1653 ///
1654 /// Locks the syntax map and interpolates the edits since the last reparse
1655 /// into the foreground syntax tree.
1656 ///
1657 /// Then takes a stable snapshot of the syntax map before unlocking it.
1658 /// The snapshot with the interpolated edits is sent to a background thread,
1659 /// where we ask Tree-sitter to perform an incremental parse.
1660 ///
    /// Meanwhile, if `may_block` is true, we block the main thread for up to
    /// `sync_parse_timeout` (1ms by default) waiting for the parse to complete,
    /// and proceed synchronously if it finishes in time.
    ///
    /// If the parse does not complete within the timeout, we return with the
    /// interpolated tree still in the foreground and spawn a second task that
    /// waits for the parse to finish. When the background parse completes, we
    /// call back into the main thread and assign the new parse state.
    ///
    /// If the buffer or grammar changed while the background parse was running,
    /// we initiate an additional reparse. To avoid concurrent parses of the
    /// same buffer, a new parse is only started if one is not already running
    /// in the background.
1674 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1675 if self.reparse.is_some() {
1676 return;
1677 }
1678 let language = if let Some(language) = self.language.clone() {
1679 language
1680 } else {
1681 return;
1682 };
1683
1684 let text = self.text_snapshot();
1685 let parsed_version = self.version();
1686
1687 let mut syntax_map = self.syntax_map.lock();
1688 syntax_map.interpolate(&text);
1689 let language_registry = syntax_map.language_registry();
1690 let mut syntax_snapshot = syntax_map.snapshot();
1691 drop(syntax_map);
1692
1693 let parse_task = cx.background_spawn({
1694 let language = language.clone();
1695 let language_registry = language_registry.clone();
1696 async move {
1697 syntax_snapshot.reparse(&text, language_registry, language);
1698 syntax_snapshot
1699 }
1700 });
1701
1702 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1703 if may_block {
1704 match cx
1705 .background_executor()
1706 .block_with_timeout(self.sync_parse_timeout, parse_task)
1707 {
1708 Ok(new_syntax_snapshot) => {
1709 self.did_finish_parsing(new_syntax_snapshot, cx);
1710 self.reparse = None;
1711 }
1712 Err(parse_task) => {
1713 self.reparse = Some(cx.spawn(async move |this, cx| {
1714 let new_syntax_map = cx.background_spawn(parse_task).await;
1715 this.update(cx, move |this, cx| {
1716 let grammar_changed = || {
1717 this.language.as_ref().is_none_or(|current_language| {
1718 !Arc::ptr_eq(&language, current_language)
1719 })
1720 };
1721 let language_registry_changed = || {
1722 new_syntax_map.contains_unknown_injections()
1723 && language_registry.is_some_and(|registry| {
1724 registry.version()
1725 != new_syntax_map.language_registry_version()
1726 })
1727 };
1728 let parse_again = this.version.changed_since(&parsed_version)
1729 || language_registry_changed()
1730 || grammar_changed();
1731 this.did_finish_parsing(new_syntax_map, cx);
1732 this.reparse = None;
1733 if parse_again {
1734 this.reparse(cx, false);
1735 }
1736 })
1737 .ok();
1738 }));
1739 }
1740 }
1741 } else {
1742 self.reparse = Some(cx.spawn(async move |this, cx| {
1743 let new_syntax_map = cx.background_spawn(parse_task).await;
1744 this.update(cx, move |this, cx| {
1745 let grammar_changed = || {
1746 this.language.as_ref().is_none_or(|current_language| {
1747 !Arc::ptr_eq(&language, current_language)
1748 })
1749 };
1750 let language_registry_changed = || {
1751 new_syntax_map.contains_unknown_injections()
1752 && language_registry.is_some_and(|registry| {
1753 registry.version() != new_syntax_map.language_registry_version()
1754 })
1755 };
1756 let parse_again = this.version.changed_since(&parsed_version)
1757 || language_registry_changed()
1758 || grammar_changed();
1759 this.did_finish_parsing(new_syntax_map, cx);
1760 this.reparse = None;
1761 if parse_again {
1762 this.reparse(cx, false);
1763 }
1764 })
1765 .ok();
1766 }));
1767 }
1768 }
1769
1770 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1771 self.was_changed();
1772 self.non_text_state_update_count += 1;
1773 self.syntax_map.lock().did_parse(syntax_snapshot);
1774 self.request_autoindent(cx);
1775 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1776 self.invalidate_tree_sitter_data(self.text.snapshot());
1777 cx.emit(BufferEvent::Reparsed);
1778 cx.notify();
1779 }
1780
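/// Returns a watch receiver that reports whether the buffer is currently being
/// parsed ([`ParseStatus::Parsing`]) or idle ([`ParseStatus::Idle`]).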
1781 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1782 self.parse_status.1.clone()
1783 }
1784
1785 /// Waits until the buffer is no longer parsing.
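///
/// A minimal usage sketch (not a compiled doctest; `buffer` and the async
/// context in which the future is awaited are assumed):
///
/// ```ignore
/// let idle = buffer.parsing_idle();
/// idle.await; // resolves once `parse_status` reports `ParseStatus::Idle`
/// ```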
1786 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1787 let mut parse_status = self.parse_status();
1788 async move {
1789 while *parse_status.borrow() != ParseStatus::Idle {
1790 if parse_status.changed().await.is_err() {
1791 break;
1792 }
1793 }
1794 }
1795 }
1796
1797 /// Assign to the buffer a set of diagnostics created by a given language server.
1798 pub fn update_diagnostics(
1799 &mut self,
1800 server_id: LanguageServerId,
1801 diagnostics: DiagnosticSet,
1802 cx: &mut Context<Self>,
1803 ) {
1804 let lamport_timestamp = self.text.lamport_clock.tick();
1805 let op = Operation::UpdateDiagnostics {
1806 server_id,
1807 diagnostics: diagnostics.iter().cloned().collect(),
1808 lamport_timestamp,
1809 };
1810
1811 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1812 self.send_operation(op, true, cx);
1813 }
1814
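/// Returns the diagnostics stored for this buffer, either those reported by a
/// single language server (when `for_server` is `Some`) or by all servers.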
1815 pub fn buffer_diagnostics(
1816 &self,
1817 for_server: Option<LanguageServerId>,
1818 ) -> Vec<&DiagnosticEntry<Anchor>> {
1819 match for_server {
1820 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1821 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1822 Err(_) => Vec::new(),
1823 },
1824 None => self
1825 .diagnostics
1826 .iter()
1827 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1828 .collect(),
1829 }
1830 }
1831
1832 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1833 if let Some(indent_sizes) = self.compute_autoindents() {
1834 let indent_sizes = cx.background_spawn(indent_sizes);
1835 match cx
1836 .background_executor()
1837 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1838 {
1839 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1840 Err(indent_sizes) => {
1841 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1842 let indent_sizes = indent_sizes.await;
1843 this.update(cx, |this, cx| {
1844 this.apply_autoindents(indent_sizes, cx);
1845 })
1846 .ok();
1847 }));
1848 }
1849 }
1850 } else {
1851 self.autoindent_requests.clear();
1852 for tx in self.wait_for_autoindent_txs.drain(..) {
1853 tx.send(()).ok();
1854 }
1855 }
1856 }
1857
1858 fn compute_autoindents(
1859 &self,
1860 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1861 let max_rows_between_yields = 100;
1862 let snapshot = self.snapshot();
1863 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1864 return None;
1865 }
1866
1867 let autoindent_requests = self.autoindent_requests.clone();
1868 Some(async move {
1869 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1870 for request in autoindent_requests {
1871 // Resolve each edited range to its row in the current buffer and in the
1872 // buffer before this batch of edits.
1873 let mut row_ranges = Vec::new();
1874 let mut old_to_new_rows = BTreeMap::new();
1875 let mut language_indent_sizes_by_new_row = Vec::new();
1876 for entry in &request.entries {
1877 let position = entry.range.start;
1878 let new_row = position.to_point(&snapshot).row;
1879 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1880 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1881
1882 if !entry.first_line_is_new {
1883 let old_row = position.to_point(&request.before_edit).row;
1884 old_to_new_rows.insert(old_row, new_row);
1885 }
1886 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1887 }
1888
1889 // Build a map containing the suggested indentation for each of the edited lines
1890 // with respect to the state of the buffer before these edits. This map is keyed
1891 // by the rows for these lines in the current state of the buffer.
1892 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1893 let old_edited_ranges =
1894 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1895 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1896 let mut language_indent_size = IndentSize::default();
1897 for old_edited_range in old_edited_ranges {
1898 let suggestions = request
1899 .before_edit
1900 .suggest_autoindents(old_edited_range.clone())
1901 .into_iter()
1902 .flatten();
1903 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1904 if let Some(suggestion) = suggestion {
1905 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1906
1907 // Find the indent size based on the language for this row.
1908 while let Some((row, size)) = language_indent_sizes.peek() {
1909 if *row > new_row {
1910 break;
1911 }
1912 language_indent_size = *size;
1913 language_indent_sizes.next();
1914 }
1915
1916 let suggested_indent = old_to_new_rows
1917 .get(&suggestion.basis_row)
1918 .and_then(|from_row| {
1919 Some(old_suggestions.get(from_row).copied()?.0)
1920 })
1921 .unwrap_or_else(|| {
1922 request
1923 .before_edit
1924 .indent_size_for_line(suggestion.basis_row)
1925 })
1926 .with_delta(suggestion.delta, language_indent_size);
1927 old_suggestions
1928 .insert(new_row, (suggested_indent, suggestion.within_error));
1929 }
1930 }
1931 yield_now().await;
1932 }
1933
1934 // Compute new suggestions for each line, but only include them in the result
1935 // if they differ from the old suggestion for that line.
1936 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1937 let mut language_indent_size = IndentSize::default();
1938 for (row_range, original_indent_column) in row_ranges {
1939 let new_edited_row_range = if request.is_block_mode {
1940 row_range.start..row_range.start + 1
1941 } else {
1942 row_range.clone()
1943 };
1944
1945 let suggestions = snapshot
1946 .suggest_autoindents(new_edited_row_range.clone())
1947 .into_iter()
1948 .flatten();
1949 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1950 if let Some(suggestion) = suggestion {
1951 // Find the indent size based on the language for this row.
1952 while let Some((row, size)) = language_indent_sizes.peek() {
1953 if *row > new_row {
1954 break;
1955 }
1956 language_indent_size = *size;
1957 language_indent_sizes.next();
1958 }
1959
1960 let suggested_indent = indent_sizes
1961 .get(&suggestion.basis_row)
1962 .copied()
1963 .map(|e| e.0)
1964 .unwrap_or_else(|| {
1965 snapshot.indent_size_for_line(suggestion.basis_row)
1966 })
1967 .with_delta(suggestion.delta, language_indent_size);
1968
1969 if old_suggestions.get(&new_row).is_none_or(
1970 |(old_indentation, was_within_error)| {
1971 suggested_indent != *old_indentation
1972 && (!suggestion.within_error || *was_within_error)
1973 },
1974 ) {
1975 indent_sizes.insert(
1976 new_row,
1977 (suggested_indent, request.ignore_empty_lines),
1978 );
1979 }
1980 }
1981 }
1982
1983 if let (true, Some(original_indent_column)) =
1984 (request.is_block_mode, original_indent_column)
1985 {
1986 let new_indent =
1987 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1988 *indent
1989 } else {
1990 snapshot.indent_size_for_line(row_range.start)
1991 };
1992 let delta = new_indent.len as i64 - original_indent_column as i64;
1993 if delta != 0 {
1994 for row in row_range.skip(1) {
1995 indent_sizes.entry(row).or_insert_with(|| {
1996 let mut size = snapshot.indent_size_for_line(row);
1997 if size.kind == new_indent.kind {
1998 match delta.cmp(&0) {
1999 Ordering::Greater => size.len += delta as u32,
2000 Ordering::Less => {
2001 size.len = size.len.saturating_sub(-delta as u32)
2002 }
2003 Ordering::Equal => {}
2004 }
2005 }
2006 (size, request.ignore_empty_lines)
2007 });
2008 }
2009 }
2010 }
2011
2012 yield_now().await;
2013 }
2014 }
2015
2016 indent_sizes
2017 .into_iter()
2018 .filter_map(|(row, (indent, ignore_empty_lines))| {
2019 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2020 None
2021 } else {
2022 Some((row, indent))
2023 }
2024 })
2025 .collect()
2026 })
2027 }
2028
2029 fn apply_autoindents(
2030 &mut self,
2031 indent_sizes: BTreeMap<u32, IndentSize>,
2032 cx: &mut Context<Self>,
2033 ) {
2034 self.autoindent_requests.clear();
2035 for tx in self.wait_for_autoindent_txs.drain(..) {
2036 tx.send(()).ok();
2037 }
2038
2039 let edits: Vec<_> = indent_sizes
2040 .into_iter()
2041 .filter_map(|(row, indent_size)| {
2042 let current_size = indent_size_for_line(self, row);
2043 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2044 })
2045 .collect();
2046
2047 let preserve_preview = self.preserve_preview();
2048 self.edit(edits, None, cx);
2049 if preserve_preview {
2050 self.refresh_preview();
2051 }
2052 }
2053
2054 /// Create a minimal edit that will cause the given row to be indented
2055 /// with the given size. After applying this edit, the length of the line
2056 /// will always be at least `new_size.len`.
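///
/// A minimal sketch of the expected output (the row and sizes are illustrative):
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces inserts two spaces at the start of row 3.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
/// ```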
2057 pub fn edit_for_indent_size_adjustment(
2058 row: u32,
2059 current_size: IndentSize,
2060 new_size: IndentSize,
2061 ) -> Option<(Range<Point>, String)> {
2062 if new_size.kind == current_size.kind {
2063 match new_size.len.cmp(&current_size.len) {
2064 Ordering::Greater => {
2065 let point = Point::new(row, 0);
2066 Some((
2067 point..point,
2068 iter::repeat(new_size.char())
2069 .take((new_size.len - current_size.len) as usize)
2070 .collect::<String>(),
2071 ))
2072 }
2073
2074 Ordering::Less => Some((
2075 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2076 String::new(),
2077 )),
2078
2079 Ordering::Equal => None,
2080 }
2081 } else {
2082 Some((
2083 Point::new(row, 0)..Point::new(row, current_size.len),
2084 iter::repeat(new_size.char())
2085 .take(new_size.len as usize)
2086 .collect::<String>(),
2087 ))
2088 }
2089 }
2090
2091 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2092 /// and the given new text.
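///
/// A minimal usage sketch, assuming an `Entity<Buffer>` named `buffer` and an
/// async GPUI context `cx` (both names are illustrative, not part of this API):
///
/// ```ignore
/// let diff = buffer.read_with(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
/// ```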
2093 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2094 let old_text = self.as_rope().clone();
2095 let base_version = self.version();
2096 cx.background_executor()
2097 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2098 let old_text = old_text.to_string();
2099 let line_ending = LineEnding::detect(&new_text);
2100 LineEnding::normalize(&mut new_text);
2101 let edits = text_diff(&old_text, &new_text);
2102 Diff {
2103 base_version,
2104 line_ending,
2105 edits,
2106 }
2107 })
2108 }
2109
2110 /// Spawns a background task that searches the buffer for any whitespace
2111 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
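///
/// A sketch of the intended workflow, following the same shape as [`Buffer::diff`]
/// (the surrounding entity and async context are assumed):
///
/// ```ignore
/// let diff = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))?.await;
/// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
/// ```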
2112 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2113 let old_text = self.as_rope().clone();
2114 let line_ending = self.line_ending();
2115 let base_version = self.version();
2116 cx.background_spawn(async move {
2117 let ranges = trailing_whitespace_ranges(&old_text);
2118 let empty = Arc::<str>::from("");
2119 Diff {
2120 base_version,
2121 line_ending,
2122 edits: ranges
2123 .into_iter()
2124 .map(|range| (range, empty.clone()))
2125 .collect(),
2126 }
2127 })
2128 }
2129
2130 /// Ensures that the buffer ends with a single newline character, and
2131 /// no other whitespace. Does nothing if the buffer is empty.
2132 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2133 let len = self.len();
2134 if len == 0 {
2135 return;
2136 }
2137 let mut offset = len;
2138 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2139 let non_whitespace_len = chunk
2140 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2141 .len();
2142 offset -= chunk.len();
2143 offset += non_whitespace_len;
2144 if non_whitespace_len != 0 {
2145 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2146 return;
2147 }
2148 break;
2149 }
2150 }
2151 self.edit([(offset..len, "\n")], None, cx);
2152 }
2153
2154 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2155 /// calculated, then adjust the diff to account for those changes, and discard any
2156 /// parts of the diff that conflict with those changes.
2157 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2158 let snapshot = self.snapshot();
2159 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2160 let mut delta = 0;
2161 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2162 while let Some(edit_since) = edits_since.peek() {
2163 // If the edit occurs after a diff hunk, then it does not
2164 // affect that hunk.
2165 if edit_since.old.start > range.end {
2166 break;
2167 }
2168 // If the edit precedes the diff hunk, then adjust the hunk
2169 // to reflect the edit.
2170 else if edit_since.old.end < range.start {
2171 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2172 edits_since.next();
2173 }
2174 // If the edit intersects a diff hunk, then discard that hunk.
2175 else {
2176 return None;
2177 }
2178 }
2179
2180 let start = (range.start as i64 + delta) as usize;
2181 let end = (range.end as i64 + delta) as usize;
2182 Some((start..end, new_text))
2183 });
2184
2185 self.start_transaction();
2186 self.text.set_line_ending(diff.line_ending);
2187 self.edit(adjusted_edits, None, cx);
2188 self.end_transaction(cx)
2189 }
2190
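/// Returns whether the buffer has been edited since it was last saved,
/// caching the answer for the buffer's current version.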
2191 pub fn has_unsaved_edits(&self) -> bool {
2192 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2193
2194 if last_version == self.version {
2195 self.has_unsaved_edits
2196 .set((last_version, has_unsaved_edits));
2197 return has_unsaved_edits;
2198 }
2199
2200 let has_edits = self.has_edits_since(&self.saved_version);
2201 self.has_unsaved_edits
2202 .set((self.version.clone(), has_edits));
2203 has_edits
2204 }
2205
2206 /// Checks if the buffer has unsaved changes.
2207 pub fn is_dirty(&self) -> bool {
2208 if self.capability == Capability::ReadOnly {
2209 return false;
2210 }
2211 if self.has_conflict {
2212 return true;
2213 }
2214 match self.file.as_ref().map(|f| f.disk_state()) {
2215 Some(DiskState::New) | Some(DiskState::Deleted) => {
2216 !self.is_empty() && self.has_unsaved_edits()
2217 }
2218 _ => self.has_unsaved_edits(),
2219 }
2220 }
2221
2222 /// Marks the buffer as having a conflict regardless of current buffer state.
2223 pub fn set_conflict(&mut self) {
2224 self.has_conflict = true;
2225 }
2226
2227 /// Checks if the buffer and its file have both changed since the buffer
2228 /// was last saved or reloaded.
2229 pub fn has_conflict(&self) -> bool {
2230 if self.has_conflict {
2231 return true;
2232 }
2233 let Some(file) = self.file.as_ref() else {
2234 return false;
2235 };
2236 match file.disk_state() {
2237 DiskState::New => false,
2238 DiskState::Present { mtime } => match self.saved_mtime {
2239 Some(saved_mtime) => {
2240 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2241 }
2242 None => true,
2243 },
2244 DiskState::Deleted => false,
2245 }
2246 }
2247
2248 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2249 pub fn subscribe(&mut self) -> Subscription<usize> {
2250 self.text.subscribe()
2251 }
2252
2253 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2254 ///
2255 /// This allows downstream code to check if the buffer's text has changed without
2256 /// waiting for an effect cycle, which would be required if using events.
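///
/// A minimal sketch of the intended usage (assumes a `cx: &mut Context<Buffer>`
/// from the surrounding entity update):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(changed.get()); // the bit was flipped by the edit
/// ```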
2257 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2258 if let Err(ix) = self
2259 .change_bits
2260 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2261 {
2262 self.change_bits.insert(ix, bit);
2263 }
2264 }
2265
2266 /// Set the change bit for all "listeners".
2267 fn was_changed(&mut self) {
2268 self.change_bits.retain(|change_bit| {
2269 change_bit
2270 .upgrade()
2271 .inspect(|bit| {
2272 _ = bit.replace(true);
2273 })
2274 .is_some()
2275 });
2276 }
2277
2278 /// Starts a transaction, if one is not already in-progress. When undoing or
2279 /// redoing edits, all of the edits performed within a transaction are undone
2280 /// or redone together.
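///
/// A minimal sketch of grouping several edits into one undoable step (assumes
/// a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// let len = buffer.len();
/// buffer.edit([(len..len, "}\n")], None, cx);
/// let transaction_id = buffer.end_transaction(cx); // one undo reverts both edits
/// ```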
2281 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2282 self.start_transaction_at(Instant::now())
2283 }
2284
2285 /// Starts a transaction, providing the current time. Subsequent transactions
2286 /// that occur within a short period of time will be grouped together. This
2287 /// is controlled by the buffer's undo grouping duration.
2288 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2289 self.transaction_depth += 1;
2290 if self.was_dirty_before_starting_transaction.is_none() {
2291 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2292 }
2293 self.text.start_transaction_at(now)
2294 }
2295
2296 /// Terminates the current transaction, if this is the outermost transaction.
2297 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2298 self.end_transaction_at(Instant::now(), cx)
2299 }
2300
2301 /// Terminates the current transaction, providing the current time. Subsequent transactions
2302 /// that occur within a short period of time will be grouped together. This
2303 /// is controlled by the buffer's undo grouping duration.
2304 pub fn end_transaction_at(
2305 &mut self,
2306 now: Instant,
2307 cx: &mut Context<Self>,
2308 ) -> Option<TransactionId> {
2309 assert!(self.transaction_depth > 0);
2310 self.transaction_depth -= 1;
2311 let was_dirty = if self.transaction_depth == 0 {
2312 self.was_dirty_before_starting_transaction.take().unwrap()
2313 } else {
2314 false
2315 };
2316 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2317 self.did_edit(&start_version, was_dirty, cx);
2318 Some(transaction_id)
2319 } else {
2320 None
2321 }
2322 }
2323
2324 /// Manually add a transaction to the buffer's undo history.
2325 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2326 self.text.push_transaction(transaction, now);
2327 }
2328
2329 /// Differs from `push_transaction` in that it does not clear the redo
2330 /// stack. Intended to be used to create a parent transaction to merge
2331 /// potential child transactions into.
2332 ///
2333 /// The caller is responsible for removing it from the undo history using
2334 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2335 /// are merged into this transaction, the caller is responsible for ensuring
2336 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2337 /// cleared is to create transactions with the usual `start_transaction` and
2338 /// `end_transaction` methods and merge the resulting transactions into
2339 /// the transaction created by this method.
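///
/// A minimal sketch of that pattern (assumes a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// buffer.start_transaction();
/// buffer.edit([(0..0, "x")], None, cx);
/// if let Some(child) = buffer.end_transaction(cx) {
///     buffer.merge_transactions(child, parent);
/// } else {
///     let _ = buffer.forget_transaction(parent);
/// }
/// ```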
2340 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2341 self.text.push_empty_transaction(now)
2342 }
2343
2344 /// Prevent the last transaction from being grouped with any subsequent transactions,
2345 /// even if they occur within the buffer's undo grouping duration.
2346 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2347 self.text.finalize_last_transaction()
2348 }
2349
2350 /// Manually group all changes since a given transaction.
2351 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2352 self.text.group_until_transaction(transaction_id);
2353 }
2354
2355 /// Manually remove a transaction from the buffer's undo history.
2356 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2357 self.text.forget_transaction(transaction_id)
2358 }
2359
2360 /// Retrieve a transaction from the buffer's undo history.
2361 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2362 self.text.get_transaction(transaction_id)
2363 }
2364
2365 /// Manually merge two transactions in the buffer's undo history.
2366 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2367 self.text.merge_transactions(transaction, destination);
2368 }
2369
2370 /// Waits for the buffer to receive operations with the given timestamps.
2371 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2372 &mut self,
2373 edit_ids: It,
2374 ) -> impl Future<Output = Result<()>> + use<It> {
2375 self.text.wait_for_edits(edit_ids)
2376 }
2377
2378 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2379 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2380 &mut self,
2381 anchors: It,
2382 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2383 self.text.wait_for_anchors(anchors)
2384 }
2385
2386 /// Waits for the buffer to receive operations up to the given version.
2387 pub fn wait_for_version(
2388 &mut self,
2389 version: clock::Global,
2390 ) -> impl Future<Output = Result<()>> + use<> {
2391 self.text.wait_for_version(version)
2392 }
2393
2394 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2395 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2396 pub fn give_up_waiting(&mut self) {
2397 self.text.give_up_waiting();
2398 }
2399
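/// Returns a receiver that resolves once all pending autoindent requests have
/// been applied, or `None` if there are no pending requests.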
2400 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2401 let mut rx = None;
2402 if !self.autoindent_requests.is_empty() {
2403 let channel = oneshot::channel();
2404 self.wait_for_autoindent_txs.push(channel.0);
2405 rx = Some(channel.1);
2406 }
2407 rx
2408 }
2409
2410 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2411 pub fn set_active_selections(
2412 &mut self,
2413 selections: Arc<[Selection<Anchor>]>,
2414 line_mode: bool,
2415 cursor_shape: CursorShape,
2416 cx: &mut Context<Self>,
2417 ) {
2418 let lamport_timestamp = self.text.lamport_clock.tick();
2419 self.remote_selections.insert(
2420 self.text.replica_id(),
2421 SelectionSet {
2422 selections: selections.clone(),
2423 lamport_timestamp,
2424 line_mode,
2425 cursor_shape,
2426 },
2427 );
2428 self.send_operation(
2429 Operation::UpdateSelections {
2430 selections,
2431 line_mode,
2432 lamport_timestamp,
2433 cursor_shape,
2434 },
2435 true,
2436 cx,
2437 );
2438 self.non_text_state_update_count += 1;
2439 cx.notify();
2440 }
2441
2442 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2443 /// this replica.
2444 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2445 if self
2446 .remote_selections
2447 .get(&self.text.replica_id())
2448 .is_none_or(|set| !set.selections.is_empty())
2449 {
2450 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2451 }
2452 }
2453
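/// Stores a set of selections attributed to the agent replica. Unlike
/// [`Buffer::set_active_selections`], these are not broadcast to other replicas.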
2454 pub fn set_agent_selections(
2455 &mut self,
2456 selections: Arc<[Selection<Anchor>]>,
2457 line_mode: bool,
2458 cursor_shape: CursorShape,
2459 cx: &mut Context<Self>,
2460 ) {
2461 let lamport_timestamp = self.text.lamport_clock.tick();
2462 self.remote_selections.insert(
2463 ReplicaId::AGENT,
2464 SelectionSet {
2465 selections,
2466 lamport_timestamp,
2467 line_mode,
2468 cursor_shape,
2469 },
2470 );
2471 self.non_text_state_update_count += 1;
2472 cx.notify();
2473 }
2474
2475 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2476 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2477 }
2478
2479 /// Replaces the buffer's entire text.
2480 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2481 where
2482 T: Into<Arc<str>>,
2483 {
2484 self.autoindent_requests.clear();
2485 self.edit([(0..self.len(), text)], None, cx)
2486 }
2487
2488 /// Appends the given text to the end of the buffer.
2489 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2490 where
2491 T: Into<Arc<str>>,
2492 {
2493 self.edit([(self.len()..self.len(), text)], None, cx)
2494 }
2495
2496 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2497 /// delete, and a string of text to insert at that location.
2498 ///
2499 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2500 /// request for the edited ranges, which will be processed when the buffer finishes
2501 /// parsing.
2502 ///
2503 /// Parsing takes place at the end of a transaction, and may run synchronously
2504 /// or asynchronously, depending on the changes.
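///
/// A minimal sketch (assumes a `cx: &mut Context<Buffer>`; the offsets are illustrative):
///
/// ```ignore
/// buffer.edit(
///     [(0..3, "let"), (10..10, "\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```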
2505 pub fn edit<I, S, T>(
2506 &mut self,
2507 edits_iter: I,
2508 autoindent_mode: Option<AutoindentMode>,
2509 cx: &mut Context<Self>,
2510 ) -> Option<clock::Lamport>
2511 where
2512 I: IntoIterator<Item = (Range<S>, T)>,
2513 S: ToOffset,
2514 T: Into<Arc<str>>,
2515 {
2516 // Skip invalid edits and coalesce contiguous ones.
2517 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2518
2519 for (range, new_text) in edits_iter {
2520 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2521
2522 if range.start > range.end {
2523 mem::swap(&mut range.start, &mut range.end);
2524 }
2525 let new_text = new_text.into();
2526 if !new_text.is_empty() || !range.is_empty() {
2527 if let Some((prev_range, prev_text)) = edits.last_mut()
2528 && prev_range.end >= range.start
2529 {
2530 prev_range.end = cmp::max(prev_range.end, range.end);
2531 *prev_text = format!("{prev_text}{new_text}").into();
2532 } else {
2533 edits.push((range, new_text));
2534 }
2535 }
2536 }
2537 if edits.is_empty() {
2538 return None;
2539 }
2540
2541 self.start_transaction();
2542 self.pending_autoindent.take();
2543 let autoindent_request = autoindent_mode
2544 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2545
2546 let edit_operation = self.text.edit(edits.iter().cloned());
2547 let edit_id = edit_operation.timestamp();
2548
2549 if let Some((before_edit, mode)) = autoindent_request {
2550 let mut delta = 0isize;
2551 let mut previous_setting = None;
2552 let entries: Vec<_> = edits
2553 .into_iter()
2554 .enumerate()
2555 .zip(&edit_operation.as_edit().unwrap().new_text)
2556 .filter(|((_, (range, _)), _)| {
2557 let language = before_edit.language_at(range.start);
2558 let language_id = language.map(|l| l.id());
2559 if let Some((cached_language_id, auto_indent)) = previous_setting
2560 && cached_language_id == language_id
2561 {
2562 auto_indent
2563 } else {
2564 // The auto-indent setting is not present in editorconfigs, hence
2565 // we can avoid passing the file here.
2566 let auto_indent =
2567 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2568 previous_setting = Some((language_id, auto_indent));
2569 auto_indent
2570 }
2571 })
2572 .map(|((ix, (range, _)), new_text)| {
2573 let new_text_length = new_text.len();
2574 let old_start = range.start.to_point(&before_edit);
2575 let new_start = (delta + range.start as isize) as usize;
2576 let range_len = range.end - range.start;
2577 delta += new_text_length as isize - range_len as isize;
2578
2579 // Decide what range of the insertion to auto-indent, and whether
2580 // the first line of the insertion should be considered a newly-inserted line
2581 // or an edit to an existing line.
2582 let mut range_of_insertion_to_indent = 0..new_text_length;
2583 let mut first_line_is_new = true;
2584
2585 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2586 let old_line_end = before_edit.line_len(old_start.row);
2587
2588 if old_start.column > old_line_start {
2589 first_line_is_new = false;
2590 }
2591
2592 if !new_text.contains('\n')
2593 && (old_start.column + (range_len as u32) < old_line_end
2594 || old_line_end == old_line_start)
2595 {
2596 first_line_is_new = false;
2597 }
2598
2599 // When inserting text starting with a newline, avoid auto-indenting the
2600 // previous line.
2601 if new_text.starts_with('\n') {
2602 range_of_insertion_to_indent.start += 1;
2603 first_line_is_new = true;
2604 }
2605
2606 let mut original_indent_column = None;
2607 if let AutoindentMode::Block {
2608 original_indent_columns,
2609 } = &mode
2610 {
2611 original_indent_column = Some(if new_text.starts_with('\n') {
2612 indent_size_for_text(
2613 new_text[range_of_insertion_to_indent.clone()].chars(),
2614 )
2615 .len
2616 } else {
2617 original_indent_columns
2618 .get(ix)
2619 .copied()
2620 .flatten()
2621 .unwrap_or_else(|| {
2622 indent_size_for_text(
2623 new_text[range_of_insertion_to_indent.clone()].chars(),
2624 )
2625 .len
2626 })
2627 });
2628
2629 // Avoid auto-indenting the line after the edit.
2630 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2631 range_of_insertion_to_indent.end -= 1;
2632 }
2633 }
2634
2635 AutoindentRequestEntry {
2636 first_line_is_new,
2637 original_indent_column,
2638 indent_size: before_edit.language_indent_size_at(range.start, cx),
2639 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2640 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2641 }
2642 })
2643 .collect();
2644
2645 if !entries.is_empty() {
2646 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2647 before_edit,
2648 entries,
2649 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2650 ignore_empty_lines: false,
2651 }));
2652 }
2653 }
2654
2655 self.end_transaction(cx);
2656 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2657 Some(edit_id)
2658 }
2659
2660 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2661 self.was_changed();
2662
2663 if self.edits_since::<usize>(old_version).next().is_none() {
2664 return;
2665 }
2666
2667 self.reparse(cx, true);
2668 cx.emit(BufferEvent::Edited);
2669 if was_dirty != self.is_dirty() {
2670 cx.emit(BufferEvent::DirtyChanged);
2671 }
2672 cx.notify();
2673 }
2674
2675 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2676 where
2677 I: IntoIterator<Item = Range<T>>,
2678 T: ToOffset + Copy,
2679 {
2680 let before_edit = self.snapshot();
2681 let entries = ranges
2682 .into_iter()
2683 .map(|range| AutoindentRequestEntry {
2684 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2685 first_line_is_new: true,
2686 indent_size: before_edit.language_indent_size_at(range.start, cx),
2687 original_indent_column: None,
2688 })
2689 .collect();
2690 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2691 before_edit,
2692 entries,
2693 is_block_mode: false,
2694 ignore_empty_lines: true,
2695 }));
2696 self.request_autoindent(cx);
2697 }
2698
2699 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2700 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2701 pub fn insert_empty_line(
2702 &mut self,
2703 position: impl ToPoint,
2704 space_above: bool,
2705 space_below: bool,
2706 cx: &mut Context<Self>,
2707 ) -> Point {
2708 let mut position = position.to_point(self);
2709
2710 self.start_transaction();
2711
2712 self.edit(
2713 [(position..position, "\n")],
2714 Some(AutoindentMode::EachLine),
2715 cx,
2716 );
2717
2718 if position.column > 0 {
2719 position += Point::new(1, 0);
2720 }
2721
2722 if !self.is_line_blank(position.row) {
2723 self.edit(
2724 [(position..position, "\n")],
2725 Some(AutoindentMode::EachLine),
2726 cx,
2727 );
2728 }
2729
2730 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2731 self.edit(
2732 [(position..position, "\n")],
2733 Some(AutoindentMode::EachLine),
2734 cx,
2735 );
2736 position.row += 1;
2737 }
2738
2739 if space_below
2740 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2741 {
2742 self.edit(
2743 [(position..position, "\n")],
2744 Some(AutoindentMode::EachLine),
2745 cx,
2746 );
2747 }
2748
2749 self.end_transaction(cx);
2750
2751 position
2752 }
2753
2754 /// Applies the given remote operations to the buffer.
2755 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2756 self.pending_autoindent.take();
2757 let was_dirty = self.is_dirty();
2758 let old_version = self.version.clone();
2759 let mut deferred_ops = Vec::new();
2760 let buffer_ops = ops
2761 .into_iter()
2762 .filter_map(|op| match op {
2763 Operation::Buffer(op) => Some(op),
2764 _ => {
2765 if self.can_apply_op(&op) {
2766 self.apply_op(op, cx);
2767 } else {
2768 deferred_ops.push(op);
2769 }
2770 None
2771 }
2772 })
2773 .collect::<Vec<_>>();
2774 for operation in buffer_ops.iter() {
2775 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2776 }
2777 self.text.apply_ops(buffer_ops);
2778 self.deferred_ops.insert(deferred_ops);
2779 self.flush_deferred_ops(cx);
2780 self.did_edit(&old_version, was_dirty, cx);
2781 // Notify independently of whether the buffer was edited as the operations could include a
2782 // selection update.
2783 cx.notify();
2784 }
2785
2786 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2787 let mut deferred_ops = Vec::new();
2788 for op in self.deferred_ops.drain().iter().cloned() {
2789 if self.can_apply_op(&op) {
2790 self.apply_op(op, cx);
2791 } else {
2792 deferred_ops.push(op);
2793 }
2794 }
2795 self.deferred_ops.insert(deferred_ops);
2796 }
2797
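/// Returns whether any operations are still deferred, waiting for the
/// operations they depend on to arrive before they can be applied.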
2798 pub fn has_deferred_ops(&self) -> bool {
2799 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2800 }
2801
2802 fn can_apply_op(&self, operation: &Operation) -> bool {
2803 match operation {
2804 Operation::Buffer(_) => {
2805 unreachable!("buffer operations should never be applied at this layer")
2806 }
2807 Operation::UpdateDiagnostics {
2808 diagnostics: diagnostic_set,
2809 ..
2810 } => diagnostic_set.iter().all(|diagnostic| {
2811 self.text.can_resolve(&diagnostic.range.start)
2812 && self.text.can_resolve(&diagnostic.range.end)
2813 }),
2814 Operation::UpdateSelections { selections, .. } => selections
2815 .iter()
2816 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2817 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2818 }
2819 }
2820
2821 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2822 match operation {
2823 Operation::Buffer(_) => {
2824 unreachable!("buffer operations should never be applied at this layer")
2825 }
2826 Operation::UpdateDiagnostics {
2827 server_id,
2828 diagnostics: diagnostic_set,
2829 lamport_timestamp,
2830 } => {
2831 let snapshot = self.snapshot();
2832 self.apply_diagnostic_update(
2833 server_id,
2834 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2835 lamport_timestamp,
2836 cx,
2837 );
2838 }
2839 Operation::UpdateSelections {
2840 selections,
2841 lamport_timestamp,
2842 line_mode,
2843 cursor_shape,
2844 } => {
2845 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2846 && set.lamport_timestamp > lamport_timestamp
2847 {
2848 return;
2849 }
2850
2851 self.remote_selections.insert(
2852 lamport_timestamp.replica_id,
2853 SelectionSet {
2854 selections,
2855 lamport_timestamp,
2856 line_mode,
2857 cursor_shape,
2858 },
2859 );
2860 self.text.lamport_clock.observe(lamport_timestamp);
2861 self.non_text_state_update_count += 1;
2862 }
2863 Operation::UpdateCompletionTriggers {
2864 triggers,
2865 lamport_timestamp,
2866 server_id,
2867 } => {
2868 if triggers.is_empty() {
2869 self.completion_triggers_per_language_server
2870 .remove(&server_id);
2871 self.completion_triggers = self
2872 .completion_triggers_per_language_server
2873 .values()
2874 .flat_map(|triggers| triggers.iter().cloned())
2875 .collect();
2876 } else {
2877 self.completion_triggers_per_language_server
2878 .insert(server_id, triggers.iter().cloned().collect());
2879 self.completion_triggers.extend(triggers);
2880 }
2881 self.text.lamport_clock.observe(lamport_timestamp);
2882 }
2883 Operation::UpdateLineEnding {
2884 line_ending,
2885 lamport_timestamp,
2886 } => {
2887 self.text.set_line_ending(line_ending);
2888 self.text.lamport_clock.observe(lamport_timestamp);
2889 }
2890 }
2891 }
2892
2893 fn apply_diagnostic_update(
2894 &mut self,
2895 server_id: LanguageServerId,
2896 diagnostics: DiagnosticSet,
2897 lamport_timestamp: clock::Lamport,
2898 cx: &mut Context<Self>,
2899 ) {
2900 if lamport_timestamp > self.diagnostics_timestamp {
2901 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2902 if diagnostics.is_empty() {
2903 if let Ok(ix) = ix {
2904 self.diagnostics.remove(ix);
2905 }
2906 } else {
2907 match ix {
2908 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2909 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2910 };
2911 }
2912 self.diagnostics_timestamp = lamport_timestamp;
2913 self.non_text_state_update_count += 1;
2914 self.text.lamport_clock.observe(lamport_timestamp);
2915 cx.notify();
2916 cx.emit(BufferEvent::DiagnosticsUpdated);
2917 }
2918 }
2919
2920 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2921 self.was_changed();
2922 cx.emit(BufferEvent::Operation {
2923 operation,
2924 is_local,
2925 });
2926 }
2927
2928 /// Removes the selections for a given peer.
2929 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2930 self.remote_selections.remove(&replica_id);
2931 cx.notify();
2932 }
2933
2934 /// Undoes the most recent transaction.
2935 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2936 let was_dirty = self.is_dirty();
2937 let old_version = self.version.clone();
2938
2939 if let Some((transaction_id, operation)) = self.text.undo() {
2940 self.send_operation(Operation::Buffer(operation), true, cx);
2941 self.did_edit(&old_version, was_dirty, cx);
2942 Some(transaction_id)
2943 } else {
2944 None
2945 }
2946 }
2947
2948 /// Manually undoes a specific transaction in the buffer's undo history.
2949 pub fn undo_transaction(
2950 &mut self,
2951 transaction_id: TransactionId,
2952 cx: &mut Context<Self>,
2953 ) -> bool {
2954 let was_dirty = self.is_dirty();
2955 let old_version = self.version.clone();
2956 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2957 self.send_operation(Operation::Buffer(operation), true, cx);
2958 self.did_edit(&old_version, was_dirty, cx);
2959 true
2960 } else {
2961 false
2962 }
2963 }
2964
2965 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2966 pub fn undo_to_transaction(
2967 &mut self,
2968 transaction_id: TransactionId,
2969 cx: &mut Context<Self>,
2970 ) -> bool {
2971 let was_dirty = self.is_dirty();
2972 let old_version = self.version.clone();
2973
2974 let operations = self.text.undo_to_transaction(transaction_id);
2975 let undone = !operations.is_empty();
2976 for operation in operations {
2977 self.send_operation(Operation::Buffer(operation), true, cx);
2978 }
2979 if undone {
2980 self.did_edit(&old_version, was_dirty, cx)
2981 }
2982 undone
2983 }
2984
2985 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2986 let was_dirty = self.is_dirty();
2987 let operation = self.text.undo_operations(counts);
2988 let old_version = self.version.clone();
2989 self.send_operation(Operation::Buffer(operation), true, cx);
2990 self.did_edit(&old_version, was_dirty, cx);
2991 }
2992
2993 /// Redoes the most recent transaction.
2994 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2995 let was_dirty = self.is_dirty();
2996 let old_version = self.version.clone();
2997
2998 if let Some((transaction_id, operation)) = self.text.redo() {
2999 self.send_operation(Operation::Buffer(operation), true, cx);
3000 self.did_edit(&old_version, was_dirty, cx);
3001 Some(transaction_id)
3002 } else {
3003 None
3004 }
3005 }
3006
3007 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3008 pub fn redo_to_transaction(
3009 &mut self,
3010 transaction_id: TransactionId,
3011 cx: &mut Context<Self>,
3012 ) -> bool {
3013 let was_dirty = self.is_dirty();
3014 let old_version = self.version.clone();
3015
3016 let operations = self.text.redo_to_transaction(transaction_id);
3017 let redone = !operations.is_empty();
3018 for operation in operations {
3019 self.send_operation(Operation::Buffer(operation), true, cx);
3020 }
3021 if redone {
3022 self.did_edit(&old_version, was_dirty, cx)
3023 }
3024 redone
3025 }
3026
3027 /// Override current completion triggers with the user-provided completion triggers.
3028 pub fn set_completion_triggers(
3029 &mut self,
3030 server_id: LanguageServerId,
3031 triggers: BTreeSet<String>,
3032 cx: &mut Context<Self>,
3033 ) {
3034 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3035 if triggers.is_empty() {
3036 self.completion_triggers_per_language_server
3037 .remove(&server_id);
3038 self.completion_triggers = self
3039 .completion_triggers_per_language_server
3040 .values()
3041 .flat_map(|triggers| triggers.iter().cloned())
3042 .collect();
3043 } else {
3044 self.completion_triggers_per_language_server
3045 .insert(server_id, triggers.clone());
3046 self.completion_triggers.extend(triggers.iter().cloned());
3047 }
3048 self.send_operation(
3049 Operation::UpdateCompletionTriggers {
3050 triggers: triggers.into_iter().collect(),
3051 lamport_timestamp: self.completion_triggers_timestamp,
3052 server_id,
3053 },
3054 true,
3055 cx,
3056 );
3057 cx.notify();
3058 }
3059
3060 /// Returns a list of strings that trigger a completion menu for this language.
3061 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3062 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3063 &self.completion_triggers
3064 }
3065
3066 /// Call this directly after performing edits to prevent the preview tab
3067 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3068 /// to return false until there are additional edits.
3069 pub fn refresh_preview(&mut self) {
3070 self.preview_version = self.version.clone();
3071 }
3072
3073 /// Whether we should preserve the preview status of a tab containing this buffer.
3074 pub fn preserve_preview(&self) -> bool {
3075 !self.has_edits_since(&self.preview_version)
3076 }
3077}
3078
3079#[doc(hidden)]
3080#[cfg(any(test, feature = "test-support"))]
3081impl Buffer {
3082 pub fn edit_via_marked_text(
3083 &mut self,
3084 marked_string: &str,
3085 autoindent_mode: Option<AutoindentMode>,
3086 cx: &mut Context<Self>,
3087 ) {
3088 let edits = self.edits_for_marked_text(marked_string);
3089 self.edit(edits, autoindent_mode, cx);
3090 }
3091
3092 pub fn set_group_interval(&mut self, group_interval: Duration) {
3093 self.text.set_group_interval(group_interval);
3094 }
3095
3096 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3097 where
3098 T: rand::Rng,
3099 {
3100 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3101 let mut last_end = None;
3102 for _ in 0..old_range_count {
3103 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3104 break;
3105 }
3106
3107 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3108 let mut range = self.random_byte_range(new_start, rng);
3109 if rng.random_bool(0.2) {
3110 mem::swap(&mut range.start, &mut range.end);
3111 }
3112 last_end = Some(range.end);
3113
3114 let new_text_len = rng.random_range(0..10);
3115 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3116 new_text = new_text.to_uppercase();
3117
3118 edits.push((range, new_text));
3119 }
3120 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3121 self.edit(edits, None, cx);
3122 }
3123
3124 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3125 let was_dirty = self.is_dirty();
3126 let old_version = self.version.clone();
3127
3128 let ops = self.text.randomly_undo_redo(rng);
3129 if !ops.is_empty() {
3130 for op in ops {
3131 self.send_operation(Operation::Buffer(op), true, cx);
3132 self.did_edit(&old_version, was_dirty, cx);
3133 }
3134 }
3135 }
3136}
3137
3138impl EventEmitter<BufferEvent> for Buffer {}
3139
3140impl Deref for Buffer {
3141 type Target = TextBuffer;
3142
3143 fn deref(&self) -> &Self::Target {
3144 &self.text
3145 }
3146}
3147
3148impl BufferSnapshot {
3149 /// Returns the [`IndentSize`] corresponding to the given line's current
3150 /// indentation, i.e. its leading whitespace.
3151 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3152 indent_size_for_line(self, row)
3153 }
3154
3155 /// Returns [`IndentSize`] for a given position that respects user settings
3156 /// and language preferences.
3157 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3158 let settings = language_settings(
3159 self.language_at(position).map(|l| l.name()),
3160 self.file(),
3161 cx,
3162 );
3163 if settings.hard_tabs {
3164 IndentSize::tab()
3165 } else {
3166 IndentSize::spaces(settings.tab_size.get())
3167 }
3168 }
3169
3170 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3171 /// is passed in as `single_indent_size`.
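///
/// A minimal sketch (the row range and indent unit are illustrative):
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```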
3172 pub fn suggested_indents(
3173 &self,
3174 rows: impl Iterator<Item = u32>,
3175 single_indent_size: IndentSize,
3176 ) -> BTreeMap<u32, IndentSize> {
3177 let mut result = BTreeMap::new();
3178
3179 for row_range in contiguous_ranges(rows, 10) {
3180 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3181 Some(suggestions) => suggestions,
3182 _ => break,
3183 };
3184
3185 for (row, suggestion) in row_range.zip(suggestions) {
3186 let indent_size = if let Some(suggestion) = suggestion {
3187 result
3188 .get(&suggestion.basis_row)
3189 .copied()
3190 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3191 .with_delta(suggestion.delta, single_indent_size)
3192 } else {
3193 self.indent_size_for_line(row)
3194 };
3195
3196 result.insert(row, indent_size);
3197 }
3198 }
3199
3200 result
3201 }
3202
3203 fn suggest_autoindents(
3204 &self,
3205 row_range: Range<u32>,
3206 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3207 let config = &self.language.as_ref()?.config;
3208 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3209
3210 #[derive(Debug, Clone)]
3211 struct StartPosition {
3212 start: Point,
3213 suffix: SharedString,
3214 }
3215
3216 // Find the suggested indentation ranges based on the syntax tree.
3217 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3218 let end = Point::new(row_range.end, 0);
3219 let range = (start..end).to_offset(&self.text);
3220 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3221 Some(&grammar.indents_config.as_ref()?.query)
3222 });
3223 let indent_configs = matches
3224 .grammars()
3225 .iter()
3226 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3227 .collect::<Vec<_>>();
3228
3229 let mut indent_ranges = Vec::<Range<Point>>::new();
3230 let mut start_positions = Vec::<StartPosition>::new();
3231 let mut outdent_positions = Vec::<Point>::new();
3232 while let Some(mat) = matches.peek() {
3233 let mut start: Option<Point> = None;
3234 let mut end: Option<Point> = None;
3235
3236 let config = indent_configs[mat.grammar_index];
3237 for capture in mat.captures {
3238 if capture.index == config.indent_capture_ix {
3239 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3240 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3241 } else if Some(capture.index) == config.start_capture_ix {
3242 start = Some(Point::from_ts_point(capture.node.end_position()));
3243 } else if Some(capture.index) == config.end_capture_ix {
3244 end = Some(Point::from_ts_point(capture.node.start_position()));
3245 } else if Some(capture.index) == config.outdent_capture_ix {
3246 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3247 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3248 start_positions.push(StartPosition {
3249 start: Point::from_ts_point(capture.node.start_position()),
3250 suffix: suffix.clone(),
3251 });
3252 }
3253 }
3254
3255 matches.advance();
3256 if let Some((start, end)) = start.zip(end) {
3257 if start.row == end.row {
3258 continue;
3259 }
3260 let range = start..end;
3261 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3262 Err(ix) => indent_ranges.insert(ix, range),
3263 Ok(ix) => {
3264 let prev_range = &mut indent_ranges[ix];
3265 prev_range.end = prev_range.end.max(range.end);
3266 }
3267 }
3268 }
3269 }
3270
3271 let mut error_ranges = Vec::<Range<Point>>::new();
3272 let mut matches = self
3273 .syntax
3274 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3275 while let Some(mat) = matches.peek() {
3276 let node = mat.captures[0].node;
3277 let start = Point::from_ts_point(node.start_position());
3278 let end = Point::from_ts_point(node.end_position());
3279 let range = start..end;
3280 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3281 Ok(ix) | Err(ix) => ix,
3282 };
3283 let mut end_ix = ix;
3284 while let Some(existing_range) = error_ranges.get(end_ix) {
3285 if existing_range.end < end {
3286 end_ix += 1;
3287 } else {
3288 break;
3289 }
3290 }
3291 error_ranges.splice(ix..end_ix, [range]);
3292 matches.advance();
3293 }
3294
3295 outdent_positions.sort();
3296 for outdent_position in outdent_positions {
3297 // Find the innermost indent range containing this outdent position
3298 // and set its end to the outdent position.
3299 if let Some(range_to_truncate) = indent_ranges
3300 .iter_mut()
3301 .filter(|indent_range| indent_range.contains(&outdent_position))
3302 .next_back()
3303 {
3304 range_to_truncate.end = outdent_position;
3305 }
3306 }
3307
3308 start_positions.sort_by_key(|b| b.start);
3309
3310 // Find the suggested indentation increases and decreases based on regexes.
3311 let mut regex_outdent_map = HashMap::default();
3312 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3313 let mut start_positions_iter = start_positions.iter().peekable();
3314
3315 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3316 self.for_each_line(
3317 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3318 ..Point::new(row_range.end, 0),
3319 |row, line| {
3320 if config
3321 .decrease_indent_pattern
3322 .as_ref()
3323 .is_some_and(|regex| regex.is_match(line))
3324 {
3325 indent_change_rows.push((row, Ordering::Less));
3326 }
3327 if config
3328 .increase_indent_pattern
3329 .as_ref()
3330 .is_some_and(|regex| regex.is_match(line))
3331 {
3332 indent_change_rows.push((row + 1, Ordering::Greater));
3333 }
3334 while let Some(pos) = start_positions_iter.peek() {
3335 if pos.start.row < row {
3336 let pos = start_positions_iter.next().unwrap();
3337 last_seen_suffix
3338 .entry(pos.suffix.to_string())
3339 .or_default()
3340 .push(pos.start);
3341 } else {
3342 break;
3343 }
3344 }
3345 for rule in &config.decrease_indent_patterns {
3346 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3347 let row_start_column = self.indent_size_for_line(row).len;
3348 let basis_row = rule
3349 .valid_after
3350 .iter()
3351 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3352 .flatten()
3353 .filter(|start_point| start_point.column <= row_start_column)
3354 .max_by_key(|start_point| start_point.row);
3355 if let Some(outdent_to_row) = basis_row {
3356 regex_outdent_map.insert(row, outdent_to_row.row);
3357 }
3358 break;
3359 }
3360 }
3361 },
3362 );
3363
3364 let mut indent_changes = indent_change_rows.into_iter().peekable();
3365 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3366 prev_non_blank_row.unwrap_or(0)
3367 } else {
3368 row_range.start.saturating_sub(1)
3369 };
3370
3371 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3372 Some(row_range.map(move |row| {
3373 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3374
3375 let mut indent_from_prev_row = false;
3376 let mut outdent_from_prev_row = false;
3377 let mut outdent_to_row = u32::MAX;
3378 let mut from_regex = false;
3379
3380 while let Some((indent_row, delta)) = indent_changes.peek() {
3381 match indent_row.cmp(&row) {
3382 Ordering::Equal => match delta {
3383 Ordering::Less => {
3384 from_regex = true;
3385 outdent_from_prev_row = true
3386 }
3387 Ordering::Greater => {
3388 indent_from_prev_row = true;
3389 from_regex = true
3390 }
3391 _ => {}
3392 },
3393
3394 Ordering::Greater => break,
3395 Ordering::Less => {}
3396 }
3397
3398 indent_changes.next();
3399 }
3400
3401 for range in &indent_ranges {
3402 if range.start.row >= row {
3403 break;
3404 }
3405 if range.start.row == prev_row && range.end > row_start {
3406 indent_from_prev_row = true;
3407 }
3408 if range.end > prev_row_start && range.end <= row_start {
3409 outdent_to_row = outdent_to_row.min(range.start.row);
3410 }
3411 }
3412
3413 if let Some(basis_row) = regex_outdent_map.get(&row) {
3414 indent_from_prev_row = false;
3415 outdent_to_row = *basis_row;
3416 from_regex = true;
3417 }
3418
3419 let within_error = error_ranges
3420 .iter()
3421 .any(|e| e.start.row < row && e.end > row_start);
3422
3423 let suggestion = if outdent_to_row == prev_row
3424 || (outdent_from_prev_row && indent_from_prev_row)
3425 {
3426 Some(IndentSuggestion {
3427 basis_row: prev_row,
3428 delta: Ordering::Equal,
3429 within_error: within_error && !from_regex,
3430 })
3431 } else if indent_from_prev_row {
3432 Some(IndentSuggestion {
3433 basis_row: prev_row,
3434 delta: Ordering::Greater,
3435 within_error: within_error && !from_regex,
3436 })
3437 } else if outdent_to_row < prev_row {
3438 Some(IndentSuggestion {
3439 basis_row: outdent_to_row,
3440 delta: Ordering::Equal,
3441 within_error: within_error && !from_regex,
3442 })
3443 } else if outdent_from_prev_row {
3444 Some(IndentSuggestion {
3445 basis_row: prev_row,
3446 delta: Ordering::Less,
3447 within_error: within_error && !from_regex,
3448 })
3449 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3450 {
3451 Some(IndentSuggestion {
3452 basis_row: prev_row,
3453 delta: Ordering::Equal,
3454 within_error: within_error && !from_regex,
3455 })
3456 } else {
3457 None
3458 };
3459
3460 prev_row = row;
3461 prev_row_start = row_start;
3462 suggestion
3463 }))
3464 }
3465
3466 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3467 while row > 0 {
3468 row -= 1;
3469 if !self.is_line_blank(row) {
3470 return Some(row);
3471 }
3472 }
3473 None
3474 }
3475
3476 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3477 let captures = self.syntax.captures(range, &self.text, |grammar| {
3478 grammar
3479 .highlights_config
3480 .as_ref()
3481 .map(|config| &config.query)
3482 });
3483 let highlight_maps = captures
3484 .grammars()
3485 .iter()
3486 .map(|grammar| grammar.highlight_map())
3487 .collect();
3488 (captures, highlight_maps)
3489 }
3490
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries are
    /// arbitrary, since the text is stored in a [`Rope`](text::Rope), but each returned chunk
    /// has a single syntax highlighting style and diagnostic status.
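    ///
    /// A rough usage sketch (marked `ignore`, so it is not compiled as a doctest); `snapshot`
    /// stands for a [`BufferSnapshot`] of a parsed buffer:
    ///
    /// ```ignore
    /// let mut highlighted_bytes = 0;
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id.
    ///     if chunk.syntax_highlight_id.is_some() {
    ///         highlighted_bytes += chunk.text.len();
    ///     }
    /// }
    /// ```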
3495 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3496 let range = range.start.to_offset(self)..range.end.to_offset(self);
3497
3498 let mut syntax = None;
3499 if language_aware {
3500 syntax = Some(self.get_highlights(range.clone()));
3501 }
3502 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3503 let diagnostics = language_aware;
3504 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3505 }
3506
3507 pub fn highlighted_text_for_range<T: ToOffset>(
3508 &self,
3509 range: Range<T>,
3510 override_style: Option<HighlightStyle>,
3511 syntax_theme: &SyntaxTheme,
3512 ) -> HighlightedText {
3513 HighlightedText::from_buffer_range(
3514 range,
3515 &self.text,
3516 &self.syntax,
3517 override_style,
3518 syntax_theme,
3519 )
3520 }
3521
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used so that a single line buffer can be reused, avoiding a `String`
    /// allocation per line.
3524 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3525 let mut line = String::new();
3526 let mut row = range.start.row;
3527 for chunk in self
3528 .as_rope()
3529 .chunks_in_range(range.to_offset(self))
3530 .chain(["\n"])
3531 {
3532 for (newline_ix, text) in chunk.split('\n').enumerate() {
3533 if newline_ix > 0 {
3534 callback(row, &line);
3535 row += 1;
3536 line.clear();
3537 }
3538 line.push_str(text);
3539 }
3540 }
3541 }
3542
3543 /// Iterates over every [`SyntaxLayer`] in the buffer.
3544 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3545 self.syntax_layers_for_range(0..self.len(), true)
3546 }
3547
3548 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3549 let offset = position.to_offset(self);
3550 self.syntax_layers_for_range(offset..offset, false)
3551 .filter(|l| {
3552 if let Some(ranges) = l.included_sub_ranges {
3553 ranges.iter().any(|range| {
3554 let start = range.start.to_offset(self);
3555 start <= offset && {
3556 let end = range.end.to_offset(self);
3557 offset < end
3558 }
3559 })
3560 } else {
3561 l.node().start_byte() <= offset && l.node().end_byte() > offset
3562 }
3563 })
3564 .last()
3565 }
3566
3567 pub fn syntax_layers_for_range<D: ToOffset>(
3568 &self,
3569 range: Range<D>,
3570 include_hidden: bool,
3571 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3572 self.syntax
3573 .layers_for_range(range, &self.text, include_hidden)
3574 }
3575
3576 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3577 &self,
3578 range: Range<D>,
3579 ) -> Option<SyntaxLayer<'_>> {
3580 let range = range.to_offset(self);
3581 self.syntax
3582 .layers_for_range(range, &self.text, false)
3583 .max_by(|a, b| {
3584 if a.depth != b.depth {
3585 a.depth.cmp(&b.depth)
3586 } else if a.offset.0 != b.offset.0 {
3587 a.offset.0.cmp(&b.offset.0)
3588 } else {
3589 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3590 }
3591 })
3592 }
3593
3594 /// Returns the main [`Language`].
3595 pub fn language(&self) -> Option<&Arc<Language>> {
3596 self.language.as_ref()
3597 }
3598
3599 /// Returns the [`Language`] at the given location.
3600 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3601 self.syntax_layer_at(position)
3602 .map(|info| info.language)
3603 .or(self.language.as_ref())
3604 }
3605
3606 /// Returns the settings for the language at the given location.
3607 pub fn settings_at<'a, D: ToOffset>(
3608 &'a self,
3609 position: D,
3610 cx: &'a App,
3611 ) -> Cow<'a, LanguageSettings> {
3612 language_settings(
3613 self.language_at(position).map(|l| l.name()),
3614 self.file.as_ref(),
3615 cx,
3616 )
3617 }
3618
3619 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3620 CharClassifier::new(self.language_scope_at(point))
3621 }
3622
3623 /// Returns the [`LanguageScope`] at the given location.
3624 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3625 let offset = position.to_offset(self);
3626 let mut scope = None;
3627 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3628
3629 // Use the layer that has the smallest node intersecting the given point.
3630 for layer in self
3631 .syntax
3632 .layers_for_range(offset..offset, &self.text, false)
3633 {
3634 let mut cursor = layer.node().walk();
3635
3636 let mut range = None;
3637 loop {
3638 let child_range = cursor.node().byte_range();
3639 if !child_range.contains(&offset) {
3640 break;
3641 }
3642
3643 range = Some(child_range);
3644 if cursor.goto_first_child_for_byte(offset).is_none() {
3645 break;
3646 }
3647 }
3648
3649 if let Some(range) = range
3650 && smallest_range_and_depth.as_ref().is_none_or(
3651 |(smallest_range, smallest_range_depth)| {
3652 if layer.depth > *smallest_range_depth {
3653 true
3654 } else if layer.depth == *smallest_range_depth {
3655 range.len() < smallest_range.len()
3656 } else {
3657 false
3658 }
3659 },
3660 )
3661 {
3662 smallest_range_and_depth = Some((range, layer.depth));
3663 scope = Some(LanguageScope {
3664 language: layer.language.clone(),
3665 override_id: layer.override_id(offset, &self.text),
3666 });
3667 }
3668 }
3669
3670 scope.or_else(|| {
3671 self.language.clone().map(|language| LanguageScope {
3672 language,
3673 override_id: None,
3674 })
3675 })
3676 }
3677
3678 /// Returns a tuple of the range and character kind of the word
3679 /// surrounding the given position.
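    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `cursor_offset` are
    /// placeholders for a [`BufferSnapshot`] and a byte offset within it:
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(cursor_offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```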
3680 pub fn surrounding_word<T: ToOffset>(
3681 &self,
3682 start: T,
3683 scope_context: Option<CharScopeContext>,
3684 ) -> (Range<usize>, Option<CharKind>) {
3685 let mut start = start.to_offset(self);
3686 let mut end = start;
3687 let mut next_chars = self.chars_at(start).take(128).peekable();
3688 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3689
3690 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3691 let word_kind = cmp::max(
3692 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3693 next_chars.peek().copied().map(|c| classifier.kind(c)),
3694 );
3695
3696 for ch in prev_chars {
3697 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3698 start -= ch.len_utf8();
3699 } else {
3700 break;
3701 }
3702 }
3703
3704 for ch in next_chars {
3705 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3706 end += ch.len_utf8();
3707 } else {
3708 break;
3709 }
3710 }
3711
3712 (start..end, word_kind)
3713 }
3714
3715 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3716 /// range. When `require_larger` is true, the node found must be larger than the query range.
3717 ///
3718 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3719 /// be moved to the root of the tree.
3720 fn goto_node_enclosing_range(
3721 cursor: &mut tree_sitter::TreeCursor,
3722 query_range: &Range<usize>,
3723 require_larger: bool,
3724 ) -> bool {
3725 let mut ascending = false;
3726 loop {
3727 let mut range = cursor.node().byte_range();
3728 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3731 if range.start > query_range.start {
3732 cursor.goto_previous_sibling();
3733 range = cursor.node().byte_range();
3734 }
3735 } else {
3736 // When the query range is non-empty and the current node ends exactly at the start,
3737 // move to the next sibling to find a node that extends beyond the start.
3738 if range.end == query_range.start {
3739 cursor.goto_next_sibling();
3740 range = cursor.node().byte_range();
3741 }
3742 }
3743
3744 let encloses = range.contains_inclusive(query_range)
3745 && (!require_larger || range.len() > query_range.len());
3746 if !encloses {
3747 ascending = true;
3748 if !cursor.goto_parent() {
3749 return false;
3750 }
3751 continue;
3752 } else if ascending {
3753 return true;
3754 }
3755
3756 // Descend into the current node.
3757 if cursor
3758 .goto_first_child_for_byte(query_range.start)
3759 .is_none()
3760 {
3761 return true;
3762 }
3763 }
3764 }
3765
3766 pub fn syntax_ancestor<'a, T: ToOffset>(
3767 &'a self,
3768 range: Range<T>,
3769 ) -> Option<tree_sitter::Node<'a>> {
3770 let range = range.start.to_offset(self)..range.end.to_offset(self);
3771 let mut result: Option<tree_sitter::Node<'a>> = None;
3772 for layer in self
3773 .syntax
3774 .layers_for_range(range.clone(), &self.text, true)
3775 {
3776 let mut cursor = layer.node().walk();
3777
3778 // Find the node that both contains the range and is larger than it.
3779 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3780 continue;
3781 }
3782
3783 let left_node = cursor.node();
3784 let mut layer_result = left_node;
3785
3786 // For an empty range, try to find another node immediately to the right of the range.
3787 if left_node.end_byte() == range.start {
3788 let mut right_node = None;
3789 while !cursor.goto_next_sibling() {
3790 if !cursor.goto_parent() {
3791 break;
3792 }
3793 }
3794
3795 while cursor.node().start_byte() == range.start {
3796 right_node = Some(cursor.node());
3797 if !cursor.goto_first_child() {
3798 break;
3799 }
3800 }
3801
3802 // If there is a candidate node on both sides of the (empty) range, then
3803 // decide between the two by favoring a named node over an anonymous token.
3804 // If both nodes are the same in that regard, favor the right one.
3805 if let Some(right_node) = right_node
3806 && (right_node.is_named() || !left_node.is_named())
3807 {
3808 layer_result = right_node;
3809 }
3810 }
3811
3812 if let Some(previous_result) = &result
3813 && previous_result.byte_range().len() < layer_result.byte_range().len()
3814 {
3815 continue;
3816 }
3817 result = Some(layer_result);
3818 }
3819
3820 result
3821 }
3822
3823 /// Find the previous sibling syntax node at the given range.
3824 ///
3825 /// This function locates the syntax node that precedes the node containing
3826 /// the given range. It searches hierarchically by:
3827 /// 1. Finding the node that contains the given range
3828 /// 2. Looking for the previous sibling at the same tree level
3829 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3830 ///
3831 /// Returns `None` if there is no previous sibling at any ancestor level.
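    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `selection_range` are
    /// placeholders for a [`BufferSnapshot`] and an offset range within it:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(selection_range) {
    ///     // `node` is a tree-sitter node; its kind and byte range describe the sibling.
    ///     let _ = (node.kind(), node.byte_range());
    /// }
    /// ```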
3832 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3833 &'a self,
3834 range: Range<T>,
3835 ) -> Option<tree_sitter::Node<'a>> {
3836 let range = range.start.to_offset(self)..range.end.to_offset(self);
3837 let mut result: Option<tree_sitter::Node<'a>> = None;
3838
3839 for layer in self
3840 .syntax
3841 .layers_for_range(range.clone(), &self.text, true)
3842 {
3843 let mut cursor = layer.node().walk();
3844
3845 // Find the node that contains the range
3846 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3847 continue;
3848 }
3849
3850 // Look for the previous sibling, moving up ancestor levels if needed
3851 loop {
3852 if cursor.goto_previous_sibling() {
3853 let layer_result = cursor.node();
3854
3855 if let Some(previous_result) = &result {
3856 if previous_result.byte_range().end < layer_result.byte_range().end {
3857 continue;
3858 }
3859 }
3860 result = Some(layer_result);
3861 break;
3862 }
3863
3864 // No sibling found at this level, try moving up to parent
3865 if !cursor.goto_parent() {
3866 break;
3867 }
3868 }
3869 }
3870
3871 result
3872 }
3873
3874 /// Find the next sibling syntax node at the given range.
3875 ///
3876 /// This function locates the syntax node that follows the node containing
3877 /// the given range. It searches hierarchically by:
3878 /// 1. Finding the node that contains the given range
3879 /// 2. Looking for the next sibling at the same tree level
3880 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3881 ///
3882 /// Returns `None` if there is no next sibling at any ancestor level.
3883 pub fn syntax_next_sibling<'a, T: ToOffset>(
3884 &'a self,
3885 range: Range<T>,
3886 ) -> Option<tree_sitter::Node<'a>> {
3887 let range = range.start.to_offset(self)..range.end.to_offset(self);
3888 let mut result: Option<tree_sitter::Node<'a>> = None;
3889
3890 for layer in self
3891 .syntax
3892 .layers_for_range(range.clone(), &self.text, true)
3893 {
3894 let mut cursor = layer.node().walk();
3895
3896 // Find the node that contains the range
3897 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3898 continue;
3899 }
3900
3901 // Look for the next sibling, moving up ancestor levels if needed
3902 loop {
3903 if cursor.goto_next_sibling() {
3904 let layer_result = cursor.node();
3905
3906 if let Some(previous_result) = &result {
3907 if previous_result.byte_range().start > layer_result.byte_range().start {
3908 continue;
3909 }
3910 }
3911 result = Some(layer_result);
3912 break;
3913 }
3914
3915 // No sibling found at this level, try moving up to parent
3916 if !cursor.goto_parent() {
3917 break;
3918 }
3919 }
3920 }
3921
3922 result
3923 }
3924
    /// Returns the root syntax node within the row containing the given position.
3926 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3927 let start_offset = position.to_offset(self);
3928
3929 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3930
3931 let layer = self
3932 .syntax
3933 .layers_for_range(start_offset..start_offset, &self.text, true)
3934 .next()?;
3935
3936 let mut cursor = layer.node().walk();
3937
3938 // Descend to the first leaf that touches the start of the range.
3939 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3940 if cursor.node().end_byte() == start_offset {
3941 cursor.goto_next_sibling();
3942 }
3943 }
3944
3945 // Ascend to the root node within the same row.
3946 while cursor.goto_parent() {
3947 if cursor.node().start_position().row != row {
3948 break;
3949 }
3950 }
3951
3952 Some(cursor.node())
3953 }
3954
3955 /// Returns the outline for the buffer.
3956 ///
3957 /// This method allows passing an optional [`SyntaxTheme`] to
3958 /// syntax-highlight the returned symbols.
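    ///
    /// A sketch of rendering the outline as an indented tree, using
    /// [`Self::outline_items_containing`], which this method wraps (not compiled as a doctest;
    /// `snapshot` is a placeholder for a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```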
3959 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3960 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3961 }
3962
3963 /// Returns all the symbols that contain the given position.
3964 ///
3965 /// This method allows passing an optional [`SyntaxTheme`] to
3966 /// syntax-highlight the returned symbols.
3967 pub fn symbols_containing<T: ToOffset>(
3968 &self,
3969 position: T,
3970 theme: Option<&SyntaxTheme>,
3971 ) -> Vec<OutlineItem<Anchor>> {
3972 let position = position.to_offset(self);
3973 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3974 let end = self.clip_offset(position + 1, Bias::Right);
3975 let mut items = self.outline_items_containing(start..end, false, theme);
3976 let mut prev_depth = None;
3977 items.retain(|item| {
3978 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3979 prev_depth = Some(item.depth);
3980 result
3981 });
3982 items
3983 }
3984
3985 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3986 let range = range.to_offset(self);
3987 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3988 grammar.outline_config.as_ref().map(|c| &c.query)
3989 });
3990 let configs = matches
3991 .grammars()
3992 .iter()
3993 .map(|g| g.outline_config.as_ref().unwrap())
3994 .collect::<Vec<_>>();
3995
3996 while let Some(mat) = matches.peek() {
3997 let config = &configs[mat.grammar_index];
3998 let containing_item_node = maybe!({
3999 let item_node = mat.captures.iter().find_map(|cap| {
4000 if cap.index == config.item_capture_ix {
4001 Some(cap.node)
4002 } else {
4003 None
4004 }
4005 })?;
4006
4007 let item_byte_range = item_node.byte_range();
4008 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4009 None
4010 } else {
4011 Some(item_node)
4012 }
4013 });
4014
4015 if let Some(item_node) = containing_item_node {
4016 return Some(
4017 Point::from_ts_point(item_node.start_position())
4018 ..Point::from_ts_point(item_node.end_position()),
4019 );
4020 }
4021
4022 matches.advance();
4023 }
4024 None
4025 }
4026
4027 pub fn outline_items_containing<T: ToOffset>(
4028 &self,
4029 range: Range<T>,
4030 include_extra_context: bool,
4031 theme: Option<&SyntaxTheme>,
4032 ) -> Vec<OutlineItem<Anchor>> {
4033 self.outline_items_containing_internal(
4034 range,
4035 include_extra_context,
4036 theme,
4037 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4038 )
4039 }
4040
4041 pub fn outline_items_as_points_containing<T: ToOffset>(
4042 &self,
4043 range: Range<T>,
4044 include_extra_context: bool,
4045 theme: Option<&SyntaxTheme>,
4046 ) -> Vec<OutlineItem<Point>> {
4047 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4048 range
4049 })
4050 }
4051
4052 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4053 &self,
4054 range: Range<T>,
4055 include_extra_context: bool,
4056 theme: Option<&SyntaxTheme>,
4057 ) -> Vec<OutlineItem<usize>> {
4058 self.outline_items_containing_internal(
4059 range,
4060 include_extra_context,
4061 theme,
4062 |buffer, range| range.to_offset(buffer),
4063 )
4064 }
4065
4066 fn outline_items_containing_internal<T: ToOffset, U>(
4067 &self,
4068 range: Range<T>,
4069 include_extra_context: bool,
4070 theme: Option<&SyntaxTheme>,
4071 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4072 ) -> Vec<OutlineItem<U>> {
4073 let range = range.to_offset(self);
4074 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4075 grammar.outline_config.as_ref().map(|c| &c.query)
4076 });
4077
4078 let mut items = Vec::new();
4079 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4080 while let Some(mat) = matches.peek() {
4081 let config = matches.grammars()[mat.grammar_index]
4082 .outline_config
4083 .as_ref()
4084 .unwrap();
4085 if let Some(item) =
4086 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4087 {
4088 items.push(item);
4089 } else if let Some(capture) = mat
4090 .captures
4091 .iter()
4092 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4093 {
4094 let capture_range = capture.node.start_position()..capture.node.end_position();
4095 let mut capture_row_range =
4096 capture_range.start.row as u32..capture_range.end.row as u32;
4097 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4098 {
4099 capture_row_range.end -= 1;
4100 }
4101 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4102 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4103 last_row_range.end = capture_row_range.end;
4104 } else {
4105 annotation_row_ranges.push(capture_row_range);
4106 }
4107 } else {
4108 annotation_row_ranges.push(capture_row_range);
4109 }
4110 }
4111 matches.advance();
4112 }
4113
4114 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4115
        // Assign depths based on containment relationships and convert the ranges via the callback.
4117 let mut item_ends_stack = Vec::<Point>::new();
4118 let mut anchor_items = Vec::new();
4119 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4120 for item in items {
4121 while let Some(last_end) = item_ends_stack.last().copied() {
4122 if last_end < item.range.end {
4123 item_ends_stack.pop();
4124 } else {
4125 break;
4126 }
4127 }
4128
4129 let mut annotation_row_range = None;
4130 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4131 let row_preceding_item = item.range.start.row.saturating_sub(1);
4132 if next_annotation_row_range.end < row_preceding_item {
4133 annotation_row_ranges.next();
4134 } else {
4135 if next_annotation_row_range.end == row_preceding_item {
4136 annotation_row_range = Some(next_annotation_row_range.clone());
4137 annotation_row_ranges.next();
4138 }
4139 break;
4140 }
4141 }
4142
4143 anchor_items.push(OutlineItem {
4144 depth: item_ends_stack.len(),
4145 range: range_callback(self, item.range.clone()),
4146 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4147 text: item.text,
4148 highlight_ranges: item.highlight_ranges,
4149 name_ranges: item.name_ranges,
4150 body_range: item.body_range.map(|r| range_callback(self, r)),
4151 annotation_range: annotation_row_range.map(|annotation_range| {
4152 let point_range = Point::new(annotation_range.start, 0)
4153 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4154 range_callback(self, point_range)
4155 }),
4156 });
4157 item_ends_stack.push(item.range.end);
4158 }
4159
4160 anchor_items
4161 }
4162
4163 fn next_outline_item(
4164 &self,
4165 config: &OutlineConfig,
4166 mat: &SyntaxMapMatch,
4167 range: &Range<usize>,
4168 include_extra_context: bool,
4169 theme: Option<&SyntaxTheme>,
4170 ) -> Option<OutlineItem<Point>> {
4171 let item_node = mat.captures.iter().find_map(|cap| {
4172 if cap.index == config.item_capture_ix {
4173 Some(cap.node)
4174 } else {
4175 None
4176 }
4177 })?;
4178
4179 let item_byte_range = item_node.byte_range();
4180 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4181 return None;
4182 }
4183 let item_point_range = Point::from_ts_point(item_node.start_position())
4184 ..Point::from_ts_point(item_node.end_position());
4185
4186 let mut open_point = None;
4187 let mut close_point = None;
4188
4189 let mut buffer_ranges = Vec::new();
4190 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4191 let mut range = node.start_byte()..node.end_byte();
4192 let start = node.start_position();
4193 if node.end_position().row > start.row {
4194 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4195 }
4196
4197 if !range.is_empty() {
4198 buffer_ranges.push((range, node_is_name));
4199 }
4200 };
4201
4202 for capture in mat.captures {
4203 if capture.index == config.name_capture_ix {
4204 add_to_buffer_ranges(capture.node, true);
4205 } else if Some(capture.index) == config.context_capture_ix
4206 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4207 {
4208 add_to_buffer_ranges(capture.node, false);
4209 } else {
4210 if Some(capture.index) == config.open_capture_ix {
4211 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4212 } else if Some(capture.index) == config.close_capture_ix {
4213 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4214 }
4215 }
4216 }
4217
4218 if buffer_ranges.is_empty() {
4219 return None;
4220 }
4221 let source_range_for_text =
4222 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4223
4224 let mut text = String::new();
4225 let mut highlight_ranges = Vec::new();
4226 let mut name_ranges = Vec::new();
4227 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4228 let mut last_buffer_range_end = 0;
4229 for (buffer_range, is_name) in buffer_ranges {
4230 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4231 if space_added {
4232 text.push(' ');
4233 }
4234 let before_append_len = text.len();
4235 let mut offset = buffer_range.start;
4236 chunks.seek(buffer_range.clone());
4237 for mut chunk in chunks.by_ref() {
4238 if chunk.text.len() > buffer_range.end - offset {
4239 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4240 offset = buffer_range.end;
4241 } else {
4242 offset += chunk.text.len();
4243 }
4244 let style = chunk
4245 .syntax_highlight_id
4246 .zip(theme)
4247 .and_then(|(highlight, theme)| highlight.style(theme));
4248 if let Some(style) = style {
4249 let start = text.len();
4250 let end = start + chunk.text.len();
4251 highlight_ranges.push((start..end, style));
4252 }
4253 text.push_str(chunk.text);
4254 if offset >= buffer_range.end {
4255 break;
4256 }
4257 }
4258 if is_name {
4259 let after_append_len = text.len();
4260 let start = if space_added && !name_ranges.is_empty() {
4261 before_append_len - 1
4262 } else {
4263 before_append_len
4264 };
4265 name_ranges.push(start..after_append_len);
4266 }
4267 last_buffer_range_end = buffer_range.end;
4268 }
4269
4270 Some(OutlineItem {
4271 depth: 0, // We'll calculate the depth later
4272 range: item_point_range,
4273 source_range_for_text: source_range_for_text.to_point(self),
4274 text,
4275 highlight_ranges,
4276 name_ranges,
4277 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4278 annotation_range: None,
4279 })
4280 }
4281
4282 pub fn function_body_fold_ranges<T: ToOffset>(
4283 &self,
4284 within: Range<T>,
4285 ) -> impl Iterator<Item = Range<usize>> + '_ {
4286 self.text_object_ranges(within, TreeSitterOptions::default())
4287 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4288 }
4289
4290 /// For each grammar in the language, runs the provided
4291 /// [`tree_sitter::Query`] against the given range.
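    ///
    /// A sketch mirroring how the outline code drives this API (not compiled as a doctest;
    /// `snapshot` is a placeholder for a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` and `mat.grammar_index` here.
    ///     matches.advance();
    /// }
    /// ```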
4292 pub fn matches(
4293 &self,
4294 range: Range<usize>,
4295 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4296 ) -> SyntaxMapMatches<'_> {
4297 self.syntax.matches(range, self, query)
4298 }
4299
    /// Finds all [`RowChunks`] applicable to the given range, then returns every bracket pair
    /// that intersects those chunks. Because matching is done per chunk, this may return more
    /// bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
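    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `visible_range` are
    /// placeholders for a [`BufferSnapshot`] and an offset range within it:
    ///
    /// ```ignore
    /// let brackets = snapshot.fetch_bracket_ranges(visible_range, None);
    /// for (_row_range, pairs) in &brackets {
    ///     for pair in pairs {
    ///         // `open_range` and `close_range` are byte ranges into the buffer.
    ///         let _ = (&pair.open_range, &pair.close_range, pair.color_index);
    ///     }
    /// }
    /// ```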
4305 pub fn fetch_bracket_ranges(
4306 &self,
4307 range: Range<usize>,
4308 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4309 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4310 let mut all_bracket_matches = HashMap::default();
4311
4312 for chunk in self
4313 .tree_sitter_data
4314 .chunks
4315 .applicable_chunks(&[range.to_point(self)])
4316 {
4317 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4318 continue;
4319 }
4320 let chunk_range = chunk.anchor_range();
4321 let chunk_range = chunk_range.to_offset(&self);
4322
4323 if let Some(cached_brackets) =
4324 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4325 {
4326 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4327 continue;
4328 }
4329
4330 let mut all_brackets = Vec::new();
4331 let mut opens = Vec::new();
4332 let mut color_pairs = Vec::new();
4333
4334 let mut matches = self
4335 .syntax
4336 .matches(chunk_range.clone(), &self.text, |grammar| {
4337 grammar.brackets_config.as_ref().map(|c| &c.query)
4338 });
4339 let configs = matches
4340 .grammars()
4341 .iter()
4342 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4343 .collect::<Vec<_>>();
4344
4345 while let Some(mat) = matches.peek() {
4346 let mut open = None;
4347 let mut close = None;
4348 let syntax_layer_depth = mat.depth;
4349 let config = configs[mat.grammar_index];
4350 let pattern = &config.patterns[mat.pattern_index];
4351 for capture in mat.captures {
4352 if capture.index == config.open_capture_ix {
4353 open = Some(capture.node.byte_range());
4354 } else if capture.index == config.close_capture_ix {
4355 close = Some(capture.node.byte_range());
4356 }
4357 }
4358
4359 matches.advance();
4360
4361 let Some((open_range, close_range)) = open.zip(close) else {
4362 continue;
4363 };
4364
4365 let bracket_range = open_range.start..=close_range.end;
4366 if !bracket_range.overlaps(&chunk_range) {
4367 continue;
4368 }
4369
4370 let index = all_brackets.len();
4371 all_brackets.push(BracketMatch {
4372 open_range: open_range.clone(),
4373 close_range: close_range.clone(),
4374 newline_only: pattern.newline_only,
4375 syntax_layer_depth,
4376 color_index: None,
4377 });
4378
                // Certain languages have "brackets" that are not really brackets, e.g. tags,
                // where such a pair matches the entire tag along with all of the text inside it.
                // For now, avoid colorizing any pair whose open and close brackets are both longer
                // than a single character. We still need to colorize `<Element/>` pairs, so this
                // check cannot be made stricter.
4383 let should_color =
4384 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4385 if should_color {
4386 opens.push(open_range.clone());
4387 color_pairs.push((open_range, close_range, index));
4388 }
4389 }
4390
4391 opens.sort_by_key(|r| (r.start, r.end));
4392 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4393 color_pairs.sort_by_key(|(_, close, _)| close.end);
4394
4395 let mut open_stack = Vec::new();
4396 let mut open_index = 0;
4397 for (open, close, index) in color_pairs {
4398 while open_index < opens.len() && opens[open_index].start < close.start {
4399 open_stack.push(opens[open_index].clone());
4400 open_index += 1;
4401 }
4402
4403 if open_stack.last() == Some(&open) {
4404 let depth_index = open_stack.len() - 1;
4405 all_brackets[index].color_index = Some(depth_index);
4406 open_stack.pop();
4407 }
4408 }
4409
4410 all_brackets.sort_by_key(|bracket_match| {
4411 (bracket_match.open_range.start, bracket_match.open_range.end)
4412 });
4413
4414 if let empty_slot @ None =
4415 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4416 {
4417 *empty_slot = Some(all_brackets.clone());
4418 }
4419 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4420 }
4421
4422 all_bracket_matches
4423 }
4424
4425 pub fn all_bracket_ranges(
4426 &self,
4427 range: Range<usize>,
4428 ) -> impl Iterator<Item = BracketMatch<usize>> {
4429 self.fetch_bracket_ranges(range.clone(), None)
4430 .into_values()
4431 .flatten()
4432 .filter(move |bracket_match| {
4433 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4434 bracket_range.overlaps(&range)
4435 })
4436 }
4437
4438 /// Returns bracket range pairs overlapping or adjacent to `range`
4439 pub fn bracket_ranges<T: ToOffset>(
4440 &self,
4441 range: Range<T>,
4442 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4443 // Find bracket pairs that *inclusively* contain the given range.
4444 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4445 self.all_bracket_ranges(range)
4446 .filter(|pair| !pair.newline_only)
4447 }
4448
4449 pub fn debug_variables_query<T: ToOffset>(
4450 &self,
4451 range: Range<T>,
4452 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4453 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4454
4455 let mut matches = self.syntax.matches_with_options(
4456 range.clone(),
4457 &self.text,
4458 TreeSitterOptions::default(),
4459 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4460 );
4461
4462 let configs = matches
4463 .grammars()
4464 .iter()
4465 .map(|grammar| grammar.debug_variables_config.as_ref())
4466 .collect::<Vec<_>>();
4467
4468 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4469
4470 iter::from_fn(move || {
4471 loop {
4472 while let Some(capture) = captures.pop() {
4473 if capture.0.overlaps(&range) {
4474 return Some(capture);
4475 }
4476 }
4477
4478 let mat = matches.peek()?;
4479
4480 let Some(config) = configs[mat.grammar_index].as_ref() else {
4481 matches.advance();
4482 continue;
4483 };
4484
4485 for capture in mat.captures {
4486 let Some(ix) = config
4487 .objects_by_capture_ix
4488 .binary_search_by_key(&capture.index, |e| e.0)
4489 .ok()
4490 else {
4491 continue;
4492 };
4493 let text_object = config.objects_by_capture_ix[ix].1;
4494 let byte_range = capture.node.byte_range();
4495
4496 let mut found = false;
4497 for (range, existing) in captures.iter_mut() {
4498 if existing == &text_object {
4499 range.start = range.start.min(byte_range.start);
4500 range.end = range.end.max(byte_range.end);
4501 found = true;
4502 break;
4503 }
4504 }
4505
4506 if !found {
4507 captures.push((byte_range, text_object));
4508 }
4509 }
4510
4511 matches.advance();
4512 }
4513 })
4514 }
4515
4516 pub fn text_object_ranges<T: ToOffset>(
4517 &self,
4518 range: Range<T>,
4519 options: TreeSitterOptions,
4520 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4521 let range =
4522 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4523
4524 let mut matches =
4525 self.syntax
4526 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4527 grammar.text_object_config.as_ref().map(|c| &c.query)
4528 });
4529
4530 let configs = matches
4531 .grammars()
4532 .iter()
4533 .map(|grammar| grammar.text_object_config.as_ref())
4534 .collect::<Vec<_>>();
4535
4536 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4537
4538 iter::from_fn(move || {
4539 loop {
4540 while let Some(capture) = captures.pop() {
4541 if capture.0.overlaps(&range) {
4542 return Some(capture);
4543 }
4544 }
4545
4546 let mat = matches.peek()?;
4547
4548 let Some(config) = configs[mat.grammar_index].as_ref() else {
4549 matches.advance();
4550 continue;
4551 };
4552
4553 for capture in mat.captures {
4554 let Some(ix) = config
4555 .text_objects_by_capture_ix
4556 .binary_search_by_key(&capture.index, |e| e.0)
4557 .ok()
4558 else {
4559 continue;
4560 };
4561 let text_object = config.text_objects_by_capture_ix[ix].1;
4562 let byte_range = capture.node.byte_range();
4563
4564 let mut found = false;
4565 for (range, existing) in captures.iter_mut() {
4566 if existing == &text_object {
4567 range.start = range.start.min(byte_range.start);
4568 range.end = range.end.max(byte_range.end);
4569 found = true;
4570 break;
4571 }
4572 }
4573
4574 if !found {
4575 captures.push((byte_range, text_object));
4576 }
4577 }
4578
4579 matches.advance();
4580 }
4581 })
4582 }
4583
4584 /// Returns enclosing bracket ranges containing the given range
4585 pub fn enclosing_bracket_ranges<T: ToOffset>(
4586 &self,
4587 range: Range<T>,
4588 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4589 let range = range.start.to_offset(self)..range.end.to_offset(self);
4590
4591 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4592 let max_depth = result
4593 .iter()
4594 .map(|mat| mat.syntax_layer_depth)
4595 .max()
4596 .unwrap_or(0);
4597 result.into_iter().filter(move |pair| {
4598 pair.open_range.start <= range.start
4599 && pair.close_range.end >= range.end
4600 && pair.syntax_layer_depth == max_depth
4601 })
4602 }
4603
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
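    ///
    /// A usage sketch (not compiled as a doctest); `snapshot` and `selection` are placeholders
    /// for a [`BufferSnapshot`] and an offset range within it:
    ///
    /// ```ignore
    /// // Ignore pairs whose brackets are directly adjacent, e.g. `()`.
    /// let filter = |open: Range<usize>, close: Range<usize>| open.end < close.start;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding pair.
    /// }
    /// ```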
4607 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4608 &self,
4609 range: Range<T>,
4610 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4611 ) -> Option<(Range<usize>, Range<usize>)> {
4612 let range = range.start.to_offset(self)..range.end.to_offset(self);
4613
4614 // Get the ranges of the innermost pair of brackets.
4615 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4616
4617 for pair in self.enclosing_bracket_ranges(range) {
4618 if let Some(range_filter) = range_filter
4619 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4620 {
4621 continue;
4622 }
4623
4624 let len = pair.close_range.end - pair.open_range.start;
4625
4626 if let Some((existing_open, existing_close)) = &result {
4627 let existing_len = existing_close.end - existing_open.start;
4628 if len > existing_len {
4629 continue;
4630 }
4631 }
4632
4633 result = Some((pair.open_range, pair.close_range));
4634 }
4635
4636 result
4637 }
4638
    /// Returns offset ranges for any matches of the redaction query.
4640 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4641 /// will be run on the relevant section of the buffer.
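    ///
    /// A usage sketch (not compiled as a doctest; `snapshot` is a placeholder for a
    /// [`BufferSnapshot`]): collect every span that should be visually masked before rendering.
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```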
4642 pub fn redacted_ranges<T: ToOffset>(
4643 &self,
4644 range: Range<T>,
4645 ) -> impl Iterator<Item = Range<usize>> + '_ {
4646 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4647 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4648 grammar
4649 .redactions_config
4650 .as_ref()
4651 .map(|config| &config.query)
4652 });
4653
4654 let configs = syntax_matches
4655 .grammars()
4656 .iter()
4657 .map(|grammar| grammar.redactions_config.as_ref())
4658 .collect::<Vec<_>>();
4659
4660 iter::from_fn(move || {
4661 let redacted_range = syntax_matches
4662 .peek()
4663 .and_then(|mat| {
4664 configs[mat.grammar_index].and_then(|config| {
4665 mat.captures
4666 .iter()
4667 .find(|capture| capture.index == config.redaction_capture_ix)
4668 })
4669 })
4670 .map(|mat| mat.node.byte_range());
4671 syntax_matches.advance();
4672 redacted_range
4673 })
4674 }
4675
4676 pub fn injections_intersecting_range<T: ToOffset>(
4677 &self,
4678 range: Range<T>,
4679 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4680 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4681
4682 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4683 grammar
4684 .injection_config
4685 .as_ref()
4686 .map(|config| &config.query)
4687 });
4688
4689 let configs = syntax_matches
4690 .grammars()
4691 .iter()
4692 .map(|grammar| grammar.injection_config.as_ref())
4693 .collect::<Vec<_>>();
4694
4695 iter::from_fn(move || {
4696 let ranges = syntax_matches.peek().and_then(|mat| {
4697 let config = &configs[mat.grammar_index]?;
4698 let content_capture_range = mat.captures.iter().find_map(|capture| {
4699 if capture.index == config.content_capture_ix {
4700 Some(capture.node.byte_range())
4701 } else {
4702 None
4703 }
4704 })?;
4705 let language = self.language_at(content_capture_range.start)?;
4706 Some((content_capture_range, language))
4707 });
4708 syntax_matches.advance();
4709 ranges
4710 })
4711 }
4712
4713 pub fn runnable_ranges(
4714 &self,
4715 offset_range: Range<usize>,
4716 ) -> impl Iterator<Item = RunnableRange> + '_ {
4717 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4718 grammar.runnable_config.as_ref().map(|config| &config.query)
4719 });
4720
4721 let test_configs = syntax_matches
4722 .grammars()
4723 .iter()
4724 .map(|grammar| grammar.runnable_config.as_ref())
4725 .collect::<Vec<_>>();
4726
4727 iter::from_fn(move || {
4728 loop {
4729 let mat = syntax_matches.peek()?;
4730
4731 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4732 let mut run_range = None;
4733 let full_range = mat.captures.iter().fold(
4734 Range {
4735 start: usize::MAX,
4736 end: 0,
4737 },
4738 |mut acc, next| {
4739 let byte_range = next.node.byte_range();
4740 if acc.start > byte_range.start {
4741 acc.start = byte_range.start;
4742 }
4743 if acc.end < byte_range.end {
4744 acc.end = byte_range.end;
4745 }
4746 acc
4747 },
4748 );
4749 if full_range.start > full_range.end {
4750 // We did not find a full spanning range of this match.
4751 return None;
4752 }
4753 let extra_captures: SmallVec<[_; 1]> =
4754 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4755 test_configs
4756 .extra_captures
4757 .get(capture.index as usize)
4758 .cloned()
4759 .and_then(|tag_name| match tag_name {
4760 RunnableCapture::Named(name) => {
4761 Some((capture.node.byte_range(), name))
4762 }
4763 RunnableCapture::Run => {
4764 let _ = run_range.insert(capture.node.byte_range());
4765 None
4766 }
4767 })
4768 }));
4769 let run_range = run_range?;
4770 let tags = test_configs
4771 .query
4772 .property_settings(mat.pattern_index)
4773 .iter()
4774 .filter_map(|property| {
4775 if *property.key == *"tag" {
4776 property
4777 .value
4778 .as_ref()
4779 .map(|value| RunnableTag(value.to_string().into()))
4780 } else {
4781 None
4782 }
4783 })
4784 .collect();
4785 let extra_captures = extra_captures
4786 .into_iter()
4787 .map(|(range, name)| {
4788 (
4789 name.to_string(),
4790 self.text_for_range(range).collect::<String>(),
4791 )
4792 })
4793 .collect();
4794 // All tags should have the same range.
4795 Some(RunnableRange {
4796 run_range,
4797 full_range,
4798 runnable: Runnable {
4799 tags,
4800 language: mat.language,
4801 buffer: self.remote_id(),
4802 },
4803 extra_captures,
4804 buffer_id: self.remote_id(),
4805 })
4806 });
4807
4808 syntax_matches.advance();
4809 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But when a match
                    // did not contain a run marker, we don't want to end this iterator early;
                    // instead we loop around and try the next match.
4812 return test_range;
4813 }
4814 }
4815 })
4816 }
4817
4818 /// Returns selections for remote peers intersecting the given range.
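    ///
    /// A sketch of consuming the nested iterator (not compiled as a doctest; `snapshot` is a
    /// placeholder for a [`BufferSnapshot`], and `Anchor::MIN..Anchor::MAX` is assumed to span
    /// the entire buffer):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     let selection_count = selections.count();
    ///     println!("replica {replica_id:?} has {selection_count} selections");
    /// }
    /// ```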
4819 #[allow(clippy::type_complexity)]
4820 pub fn selections_in_range(
4821 &self,
4822 range: Range<Anchor>,
4823 include_local: bool,
4824 ) -> impl Iterator<
4825 Item = (
4826 ReplicaId,
4827 bool,
4828 CursorShape,
4829 impl Iterator<Item = &Selection<Anchor>> + '_,
4830 ),
4831 > + '_ {
4832 self.remote_selections
4833 .iter()
4834 .filter(move |(replica_id, set)| {
4835 (include_local || **replica_id != self.text.replica_id())
4836 && !set.selections.is_empty()
4837 })
4838 .map(move |(replica_id, set)| {
4839 let start_ix = match set.selections.binary_search_by(|probe| {
4840 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4841 }) {
4842 Ok(ix) | Err(ix) => ix,
4843 };
4844 let end_ix = match set.selections.binary_search_by(|probe| {
4845 probe.start.cmp(&range.end, self).then(Ordering::Less)
4846 }) {
4847 Ok(ix) | Err(ix) => ix,
4848 };
4849
4850 (
4851 *replica_id,
4852 set.line_mode,
4853 set.cursor_shape,
4854 set.selections[start_ix..end_ix].iter(),
4855 )
4856 })
4857 }
4858
    /// Returns whether the buffer contains any diagnostics.
4860 pub fn has_diagnostics(&self) -> bool {
4861 !self.diagnostics.is_empty()
4862 }
4863
4864 /// Returns all the diagnostics intersecting the given range.
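    ///
    /// A usage sketch that resolves the entries to byte offsets (not compiled as a doctest;
    /// `snapshot` is a placeholder for a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
    ///         // `entry.range` is a byte range covering the error.
    ///         let _ = &entry.range;
    ///     }
    /// }
    /// ```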
4865 pub fn diagnostics_in_range<'a, T, O>(
4866 &'a self,
4867 search_range: Range<T>,
4868 reversed: bool,
4869 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4870 where
4871 T: 'a + Clone + ToOffset,
4872 O: 'a + FromAnchor,
4873 {
4874 let mut iterators: Vec<_> = self
4875 .diagnostics
4876 .iter()
4877 .map(|(_, collection)| {
4878 collection
4879 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4880 .peekable()
4881 })
4882 .collect();
4883
4884 std::iter::from_fn(move || {
4885 let (next_ix, _) = iterators
4886 .iter_mut()
4887 .enumerate()
4888 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4889 .min_by(|(_, a), (_, b)| {
4890 let cmp = a
4891 .range
4892 .start
4893 .cmp(&b.range.start, self)
4894 // when range is equal, sort by diagnostic severity
4895 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4896 // and stabilize order with group_id
4897 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4898 if reversed { cmp.reverse() } else { cmp }
4899 })?;
4900 iterators[next_ix]
4901 .next()
4902 .map(
4903 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4904 diagnostic,
4905 range: FromAnchor::from_anchor(&range.start, self)
4906 ..FromAnchor::from_anchor(&range.end, self),
4907 },
4908 )
4909 })
4910 }
4911
4912 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4913 /// should be used instead.
4914 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4915 &self.diagnostics
4916 }
4917
4918 /// Returns all the diagnostic groups associated with the given
4919 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4921 pub fn diagnostic_groups(
4922 &self,
4923 language_server_id: Option<LanguageServerId>,
4924 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4925 let mut groups = Vec::new();
4926
4927 if let Some(language_server_id) = language_server_id {
4928 if let Ok(ix) = self
4929 .diagnostics
4930 .binary_search_by_key(&language_server_id, |e| e.0)
4931 {
4932 self.diagnostics[ix]
4933 .1
4934 .groups(language_server_id, &mut groups, self);
4935 }
4936 } else {
4937 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4938 diagnostics.groups(*language_server_id, &mut groups, self);
4939 }
4940 }
4941
4942 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4943 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4944 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4945 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4946 });
4947
4948 groups
4949 }
4950
4951 /// Returns an iterator over the diagnostics for the given group.
4952 pub fn diagnostic_group<O>(
4953 &self,
4954 group_id: usize,
4955 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4956 where
4957 O: FromAnchor + 'static,
4958 {
4959 self.diagnostics
4960 .iter()
4961 .flat_map(move |(_, set)| set.group(group_id, self))
4962 }
4963
4964 /// An integer version number that accounts for all updates besides
4965 /// the buffer's text itself (which is versioned via a version vector).
4966 pub fn non_text_state_update_count(&self) -> usize {
4967 self.non_text_state_update_count
4968 }
4969
4970 /// An integer version that changes when the buffer's syntax changes.
4971 pub fn syntax_update_count(&self) -> usize {
4972 self.syntax.update_count()
4973 }
4974
    /// Returns a snapshot of the underlying file.
4976 pub fn file(&self) -> Option<&Arc<dyn File>> {
4977 self.file.as_ref()
4978 }
4979
4980 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4981 if let Some(file) = self.file() {
4982 if file.path().file_name().is_none() || include_root {
4983 Some(file.full_path(cx).to_string_lossy().into_owned())
4984 } else {
4985 Some(file.path().display(file.path_style(cx)).to_string())
4986 }
4987 } else {
4988 None
4989 }
4990 }
4991
4992 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4993 let query_str = query.fuzzy_contents;
4994 if query_str.is_some_and(|query| query.is_empty()) {
4995 return BTreeMap::default();
4996 }
4997
4998 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4999 language,
5000 override_id: None,
5001 }));
5002
5003 let mut query_ix = 0;
5004 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5005 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5006
5007 let mut words = BTreeMap::default();
5008 let mut current_word_start_ix = None;
5009 let mut chunk_ix = query.range.start;
5010 for chunk in self.chunks(query.range, false) {
5011 for (i, c) in chunk.text.char_indices() {
5012 let ix = chunk_ix + i;
5013 if classifier.is_word(c) {
5014 if current_word_start_ix.is_none() {
5015 current_word_start_ix = Some(ix);
5016 }
5017
5018 if let Some(query_chars) = &query_chars
5019 && query_ix < query_len
5020 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5021 {
5022 query_ix += 1;
5023 }
5024 continue;
5025 } else if let Some(word_start) = current_word_start_ix.take()
5026 && query_ix == query_len
5027 {
5028 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5029 let mut word_text = self.text_for_range(word_start..ix).peekable();
5030 let first_char = word_text
5031 .peek()
5032 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start
                    // with a digit when `skip_digits` is set.
5034 if !query.skip_digits
5035 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5036 {
5037 words.insert(word_text.collect(), word_range);
5038 }
5039 }
5040 query_ix = 0;
5041 }
5042 chunk_ix += chunk.text.len();
5043 }
5044
5045 words
5046 }
5047}
5048
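/// Parameters for `words_in_range`, which collects the distinct words within a region of the
/// buffer (used, for example, to produce word-based completions).
///
/// A usage sketch (not compiled as a doctest; `snapshot` is a placeholder for a
/// [`BufferSnapshot`]):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("req"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// // `words` maps each matching word to its anchor range in the buffer.
/// ```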
5049pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of this string, in order
    /// (matched case-insensitively).
5051 pub fuzzy_contents: Option<&'a str>,
5052 /// Skips words that start with a digit.
5053 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5055 pub range: Range<usize>,
5056}
5057
5058fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5059 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5060}
5061
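/// Measures the leading whitespace of `text`: `kind` is taken from the first whitespace
/// character, and `len` counts every leading space or tab before the first other character.
///
/// For example, `indent_size_for_text("\t\tlet x = 1;".chars())` yields an [`IndentSize`]
/// with `kind == IndentKind::Tab` and `len == 2`.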
5062fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5063 let mut result = IndentSize::spaces(0);
5064 for c in text {
5065 let kind = match c {
5066 ' ' => IndentKind::Space,
5067 '\t' => IndentKind::Tab,
5068 _ => break,
5069 };
5070 if result.len == 0 {
5071 result.kind = kind;
5072 }
5073 result.len += 1;
5074 }
5075 result
5076}
5077
5078impl Clone for BufferSnapshot {
5079 fn clone(&self) -> Self {
5080 Self {
5081 text: self.text.clone(),
5082 syntax: self.syntax.clone(),
5083 file: self.file.clone(),
5084 remote_selections: self.remote_selections.clone(),
5085 diagnostics: self.diagnostics.clone(),
5086 language: self.language.clone(),
5087 tree_sitter_data: self.tree_sitter_data.clone(),
5088 non_text_state_update_count: self.non_text_state_update_count,
5089 }
5090 }
5091}
5092
5093impl Deref for BufferSnapshot {
5094 type Target = text::BufferSnapshot;
5095
5096 fn deref(&self) -> &Self::Target {
5097 &self.text
5098 }
5099}
5100
5101unsafe impl Send for BufferChunks<'_> {}
5102
5103impl<'a> BufferChunks<'a> {
5104 pub(crate) fn new(
5105 text: &'a Rope,
5106 range: Range<usize>,
5107 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5108 diagnostics: bool,
5109 buffer_snapshot: Option<&'a BufferSnapshot>,
5110 ) -> Self {
5111 let mut highlights = None;
5112 if let Some((captures, highlight_maps)) = syntax {
5113 highlights = Some(BufferChunkHighlights {
5114 captures,
5115 next_capture: None,
5116 stack: Default::default(),
5117 highlight_maps,
5118 })
5119 }
5120
5121 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5122 let chunks = text.chunks_in_range(range.clone());
5123
5124 let mut this = BufferChunks {
5125 range,
5126 buffer_snapshot,
5127 chunks,
5128 diagnostic_endpoints,
5129 error_depth: 0,
5130 warning_depth: 0,
5131 information_depth: 0,
5132 hint_depth: 0,
5133 unnecessary_depth: 0,
5134 underline: true,
5135 highlights,
5136 };
5137 this.initialize_diagnostic_endpoints();
5138 this
5139 }
5140
    /// Seeks to the given byte range in the buffer.
5142 pub fn seek(&mut self, range: Range<usize>) {
5143 let old_range = std::mem::replace(&mut self.range, range.clone());
5144 self.chunks.set_range(self.range.clone());
5145 if let Some(highlights) = self.highlights.as_mut() {
5146 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5147 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5148 highlights
5149 .stack
5150 .retain(|(end_offset, _)| *end_offset > range.start);
5151 if let Some(capture) = &highlights.next_capture
5152 && range.start >= capture.node.start_byte()
5153 {
5154 let next_capture_end = capture.node.end_byte();
5155 if range.start < next_capture_end {
5156 highlights.stack.push((
5157 next_capture_end,
5158 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5159 ));
5160 }
5161 highlights.next_capture.take();
5162 }
5163 } else if let Some(snapshot) = self.buffer_snapshot {
5164 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5165 *highlights = BufferChunkHighlights {
5166 captures,
5167 next_capture: None,
5168 stack: Default::default(),
5169 highlight_maps,
5170 };
5171 } else {
5172 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5173 // Seeking such BufferChunks is not supported.
5174 debug_assert!(
5175 false,
5176 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5177 );
5178 }
5179
5180 highlights.captures.set_byte_range(self.range.clone());
5181 self.initialize_diagnostic_endpoints();
5182 }
5183 }
5184
5185 fn initialize_diagnostic_endpoints(&mut self) {
5186 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5187 && let Some(buffer) = self.buffer_snapshot
5188 {
5189 let mut diagnostic_endpoints = Vec::new();
5190 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5191 diagnostic_endpoints.push(DiagnosticEndpoint {
5192 offset: entry.range.start,
5193 is_start: true,
5194 severity: entry.diagnostic.severity,
5195 is_unnecessary: entry.diagnostic.is_unnecessary,
5196 underline: entry.diagnostic.underline,
5197 });
5198 diagnostic_endpoints.push(DiagnosticEndpoint {
5199 offset: entry.range.end,
5200 is_start: false,
5201 severity: entry.diagnostic.severity,
5202 is_unnecessary: entry.diagnostic.is_unnecessary,
5203 underline: entry.diagnostic.underline,
5204 });
5205 }
5206 diagnostic_endpoints
5207 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5208 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5209 self.hint_depth = 0;
5210 self.error_depth = 0;
5211 self.warning_depth = 0;
5212 self.information_depth = 0;
5213 }
5214 }
5215
5216 /// The current byte offset in the buffer.
5217 pub fn offset(&self) -> usize {
5218 self.range.start
5219 }
5220
5221 pub fn range(&self) -> Range<usize> {
5222 self.range.clone()
5223 }
5224
5225 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5226 let depth = match endpoint.severity {
5227 DiagnosticSeverity::ERROR => &mut self.error_depth,
5228 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5229 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5230 DiagnosticSeverity::HINT => &mut self.hint_depth,
5231 _ => return,
5232 };
5233 if endpoint.is_start {
5234 *depth += 1;
5235 } else {
5236 *depth -= 1;
5237 }
5238
5239 if endpoint.is_unnecessary {
5240 if endpoint.is_start {
5241 self.unnecessary_depth += 1;
5242 } else {
5243 self.unnecessary_depth -= 1;
5244 }
5245 }
5246 }
5247
5248 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5249 if self.error_depth > 0 {
5250 Some(DiagnosticSeverity::ERROR)
5251 } else if self.warning_depth > 0 {
5252 Some(DiagnosticSeverity::WARNING)
5253 } else if self.information_depth > 0 {
5254 Some(DiagnosticSeverity::INFORMATION)
5255 } else if self.hint_depth > 0 {
5256 Some(DiagnosticSeverity::HINT)
5257 } else {
5258 None
5259 }
5260 }
5261
5262 fn current_code_is_unnecessary(&self) -> bool {
5263 self.unnecessary_depth > 0
5264 }
5265}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop any highlight captures that have already ended at or before the current offset.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Consume captures that begin at or before the current offset, pushing them
            // onto the stack, and note where the next upcoming capture starts.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Apply diagnostic endpoints that have already been passed and find the next upcoming one.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        // Emit the next chunk, clipped to the nearest capture or diagnostic boundary.
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
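
// `BufferChunks::next` slices the per-byte `tabs`/`chars` bitmaps with a shift-and-mask
// over a `u128`. A minimal, self-contained sketch of that pattern (the bit positions
// here are made up purely for illustration):
#[cfg(test)]
mod chunk_bitmap_sketch {
    #[test]
    fn shift_and_mask_extracts_a_bit_window() {
        // Pretend bytes 2 and 5 of a chunk are tabs.
        let tabs: u128 = (1 << 2) | (1 << 5);
        let (bit_start, bit_end) = (2u32, 6u32);
        // Same mask construction as in `next` above.
        let mask = 1u128.unbounded_shl(bit_end).wrapping_sub(1);
        let window = (tabs >> bit_start) & mask;
        // Bit 0 of the window now corresponds to byte 2, bit 3 to byte 5.
        assert_eq!(window, 0b1001);
    }
}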

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size along the given direction.
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// The width of this indent in columns, with each tab counted as `tab_size` columns.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}
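
// A minimal sketch of `IndentSize::with_delta` and `len_with_expanded_tabs`, assuming
// the semantics implemented above: deltas only combine across matching indent kinds,
// except that a delta applied to an empty indent is adopted wholesale.
#[cfg(test)]
mod indent_size_sketch {
    use super::*;

    #[test]
    fn with_delta_grows_and_shrinks_matching_kinds() {
        assert_eq!(
            IndentSize::spaces(4)
                .with_delta(Ordering::Greater, IndentSize::spaces(4))
                .len,
            8
        );
        assert_eq!(
            IndentSize::spaces(4)
                .with_delta(Ordering::Less, IndentSize::spaces(2))
                .len,
            2
        );
        // A tab delta does not change a non-empty space indent...
        assert_eq!(
            IndentSize::spaces(4)
                .with_delta(Ordering::Greater, IndentSize::tab())
                .len,
            4
        );
        // ...but an empty indent adopts the delta wholesale.
        let adopted = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
        assert!(matches!(adopted.kind, IndentKind::Tab));
        assert_eq!(adopted.len, 1);
    }

    #[test]
    fn tabs_expand_to_the_configured_width() {
        let tab_size = NonZeroU32::new(4).unwrap();
        assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
        assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    }
}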

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

/// Groups a sequence of values into ranges of consecutive values, with each
/// range covering at most `max_len` values.
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}
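
// A minimal sketch of `contiguous_ranges`: consecutive values merge into a single
// range, a gap starts a new range, and `max_len` caps how many values a range may hold.
#[cfg(test)]
mod contiguous_ranges_sketch {
    use super::*;

    #[test]
    fn merges_runs_and_respects_max_len() {
        let ranges: Vec<_> =
            contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect();
        assert_eq!(ranges, vec![1..4, 5..7, 9..13]);

        let capped: Vec<_> = contiguous_ranges([1, 2, 3, 4, 5].into_iter(), 2).collect();
        assert_eq!(capped, vec![1..3, 3..5, 5..6]);
    }
}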

/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally honoring a language scope's configured word characters.
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}
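
// A minimal sketch of `CharClassifier` with no language scope attached, so only the
// built-in rules above apply: alphanumerics and `_` are words, whitespace is
// whitespace, and everything else is punctuation unless punctuation is ignored.
#[cfg(test)]
mod char_classifier_sketch {
    use super::*;

    #[test]
    fn classifies_characters_without_a_scope() {
        let classifier = CharClassifier::new(None);
        assert!(classifier.kind('a') == CharKind::Word);
        assert!(classifier.kind('_') == CharKind::Word);
        assert!(classifier.kind(' ') == CharKind::Whitespace);
        assert!(classifier.kind('.') == CharKind::Punctuation);

        // With punctuation ignored, non-word, non-whitespace characters count as words.
        let relaxed = CharClassifier::new(None).ignore_punctuation(true);
        assert!(relaxed.kind('.') == CharKind::Word);
    }
}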

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}
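
// A minimal sketch of `trailing_whitespace_ranges`, assuming `Rope` can be built from
// a `&str` via `From`: the spaces after "abc" and the tab after "def" are reported as
// byte ranges, and the final line contributes nothing.
#[cfg(test)]
mod trailing_whitespace_sketch {
    use super::*;

    #[test]
    fn reports_whitespace_at_line_ends() {
        let rope = Rope::from("abc  \ndef\t\nghi");
        assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
    }
}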