1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to has_changes_since(saved_version).
    /// The cell contains (self.version, has_changes) as of the last call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
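///
/// For example, a line indented with four spaces is described by
/// `IndentSize { len: 4, kind: IndentKind::Space }`.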
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// The ID provided by the dynamic registration that produced this diagnostic.
249 pub registration_id: Option<SharedString>,
250 /// A machine-readable code that identifies this diagnostic.
251 pub code: Option<NumberOrString>,
252 pub code_description: Option<lsp::Uri>,
253 /// Whether this diagnostic is a hint, warning, or error.
254 pub severity: DiagnosticSeverity,
255 /// The human-readable message associated with this diagnostic.
256 pub message: String,
    /// The human-readable message in Markdown format, if available.
258 pub markdown: Option<String>,
259 /// An id that identifies the group to which this diagnostic belongs.
260 ///
261 /// When a language server produces a diagnostic with
262 /// one or more associated diagnostics, those diagnostics are all
263 /// assigned a single group ID.
264 pub group_id: usize,
265 /// Whether this diagnostic is the primary diagnostic for its group.
266 ///
267 /// In a given group, the primary diagnostic is the top-level diagnostic
268 /// returned by the language server. The non-primary diagnostics are the
269 /// associated diagnostics.
270 pub is_primary: bool,
271 /// Whether this diagnostic is considered to originate from an analysis of
272 /// files on disk, as opposed to any unsaved buffer contents. This is a
273 /// property of a given diagnostic source, and is configured for a given
274 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
275 /// for the language server.
276 pub is_disk_based: bool,
277 /// Whether this diagnostic marks unnecessary code.
278 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups by their source.
280 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when code actions are requested for this diagnostic.
282 pub data: Option<Value>,
283 /// Whether to underline the corresponding text range in the editor.
284 pub underline: bool,
285}
286
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
288pub enum DiagnosticSourceKind {
289 Pulled,
290 Pushed,
291 Other,
292}
293
294/// An operation used to synchronize this buffer with its other replicas.
295#[derive(Clone, Debug, PartialEq)]
296pub enum Operation {
297 /// A text operation.
298 Buffer(text::Operation),
299
300 /// An update to the buffer's diagnostics.
301 UpdateDiagnostics {
302 /// The id of the language server that produced the new diagnostics.
303 server_id: LanguageServerId,
304 /// The diagnostics.
305 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 },
309
310 /// An update to the most recent selections in this buffer.
311 UpdateSelections {
312 /// The selections.
313 selections: Arc<[Selection<Anchor>]>,
314 /// The buffer's lamport timestamp.
315 lamport_timestamp: clock::Lamport,
316 /// Whether the selections are in 'line mode'.
317 line_mode: bool,
318 /// The [`CursorShape`] associated with these selections.
319 cursor_shape: CursorShape,
320 },
321
322 /// An update to the characters that should trigger autocompletion
323 /// for this buffer.
324 UpdateCompletionTriggers {
325 /// The characters that trigger autocompletion.
326 triggers: Vec<String>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// The language server ID.
330 server_id: LanguageServerId,
331 },
332
333 /// An update to the line ending type of this buffer.
334 UpdateLineEnding {
335 /// The line ending type.
336 line_ending: LineEnding,
337 /// The buffer's lamport timestamp.
338 lamport_timestamp: clock::Lamport,
339 },
340}
341
342/// An event that occurs in a buffer.
343#[derive(Clone, Debug, PartialEq)]
344pub enum BufferEvent {
345 /// The buffer was changed in a way that must be
346 /// propagated to its other replicas.
347 Operation {
348 operation: Operation,
349 is_local: bool,
350 },
351 /// The buffer was edited.
352 Edited,
353 /// The buffer's `dirty` bit changed.
354 DirtyChanged,
355 /// The buffer was saved.
356 Saved,
357 /// The buffer's file was changed on disk.
358 FileHandleChanged,
359 /// The buffer was reloaded.
360 Reloaded,
    /// The buffer needs to be reloaded.
362 ReloadNeeded,
363 /// The buffer's language was changed.
364 /// The boolean indicates whether this buffer did not have a language before, but does now.
365 LanguageChanged(bool),
366 /// The buffer's syntax trees were updated.
367 Reparsed,
368 /// The buffer's diagnostics were updated.
369 DiagnosticsUpdated,
370 /// The buffer gained or lost editing capabilities.
371 CapabilityChanged,
372}
373
374/// The file associated with a buffer.
375pub trait File: Send + Sync + Any {
376 /// Returns the [`LocalFile`] associated with this file, if the
377 /// file is local.
378 fn as_local(&self) -> Option<&dyn LocalFile>;
379
380 /// Returns whether this file is local.
381 fn is_local(&self) -> bool {
382 self.as_local().is_some()
383 }
384
385 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
386 /// only available in some states, such as modification time.
387 fn disk_state(&self) -> DiskState;
388
389 /// Returns the path of this file relative to the worktree's root directory.
390 fn path(&self) -> &Arc<RelPath>;
391
392 /// Returns the path of this file relative to the worktree's parent directory (this means it
393 /// includes the name of the worktree's root folder).
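    ///
    /// For example, for a worktree whose root folder is named `zed`, a file
    /// with a `path()` of `src/lib.rs` has a `full_path()` of `zed/src/lib.rs`
    /// (the names here are illustrative).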
394 fn full_path(&self, cx: &App) -> PathBuf;
395
396 /// Returns the path style of this file.
397 fn path_style(&self, cx: &App) -> PathStyle;
398
399 /// Returns the last component of this handle's absolute path. If this handle refers to the root
400 /// of its worktree, then this method will return the name of the worktree itself.
401 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
402
403 /// Returns the id of the worktree to which this file belongs.
404 ///
405 /// This is needed for looking up project-specific settings.
406 fn worktree_id(&self, cx: &App) -> WorktreeId;
407
408 /// Converts this file into a protobuf message.
409 fn to_proto(&self, cx: &App) -> rpc::proto::File;
410
    /// Returns whether Zed considers this to be a private file.
412 fn is_private(&self) -> bool;
413}
414
415/// The file's storage status - whether it's stored (`Present`), and if so when it was last
416/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
417/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
418/// indicator for new files.
419#[derive(Copy, Clone, Debug, PartialEq)]
420pub enum DiskState {
421 /// File created in Zed that has not been saved.
422 New,
423 /// File present on the filesystem.
424 Present { mtime: MTime },
425 /// Deleted file that was previously present.
426 Deleted,
427}
428
429impl DiskState {
430 /// Returns the file's last known modification time on disk.
431 pub fn mtime(self) -> Option<MTime> {
432 match self {
433 DiskState::New => None,
434 DiskState::Present { mtime } => Some(mtime),
435 DiskState::Deleted => None,
436 }
437 }
438
439 pub fn exists(&self) -> bool {
440 match self {
441 DiskState::New => false,
442 DiskState::Present { .. } => true,
443 DiskState::Deleted => false,
444 }
445 }
446}
447
448/// The file associated with a buffer, in the case where the file is on the local disk.
449pub trait LocalFile: File {
    /// Returns the absolute path of this file.
451 fn abs_path(&self, cx: &App) -> PathBuf;
452
453 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
454 fn load(&self, cx: &App) -> Task<Result<String>>;
455
456 /// Loads the file's contents from disk.
457 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
458}
459
460/// The auto-indent behavior associated with an editing operation.
461/// For some editing operations, each affected line of text has its
462/// indentation recomputed. For other operations, the entire block
463/// of edited text is adjusted uniformly.
464#[derive(Clone, Debug)]
465pub enum AutoindentMode {
466 /// Indent each line of inserted text.
467 EachLine,
468 /// Apply the same indentation adjustment to all of the lines
469 /// in a given insertion.
470 Block {
471 /// The original indentation column of the first line of each
472 /// insertion, if it has been copied.
473 ///
474 /// Knowing this makes it possible to preserve the relative indentation
475 /// of every line in the insertion from when it was copied.
476 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other
        /// line of the insertion will have its indentation adjusted by
        /// `b - a` columns.
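        ///
        /// For example, if the copied block's first line originally began at
        /// column 4 (`a = 4`) and auto-indent places that line at column 8
        /// (`b = 8`), every other line of the insertion is shifted right by
        /// `b - a = 4` columns, preserving the block's internal structure.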
480 original_indent_columns: Vec<Option<u32>>,
481 },
482}
483
484#[derive(Clone)]
485struct AutoindentRequest {
486 before_edit: BufferSnapshot,
487 entries: Vec<AutoindentRequestEntry>,
488 is_block_mode: bool,
489 ignore_empty_lines: bool,
490}
491
492#[derive(Debug, Clone)]
493struct AutoindentRequestEntry {
494 /// A range of the buffer whose indentation should be adjusted.
495 range: Range<Anchor>,
496 /// Whether or not these lines should be considered brand new, for the
497 /// purpose of auto-indent. When text is not new, its indentation will
498 /// only be adjusted if the suggested indentation level has *changed*
499 /// since the edit was made.
500 first_line_is_new: bool,
501 indent_size: IndentSize,
502 original_indent_column: Option<u32>,
503}
504
505#[derive(Debug)]
506struct IndentSuggestion {
507 basis_row: u32,
508 delta: Ordering,
509 within_error: bool,
510}
511
512struct BufferChunkHighlights<'a> {
513 captures: SyntaxMapCaptures<'a>,
514 next_capture: Option<SyntaxMapCapture<'a>>,
515 stack: Vec<(usize, HighlightId)>,
516 highlight_maps: Vec<HighlightMap>,
517}
518
519/// An iterator that yields chunks of a buffer's text, along with their
520/// syntax highlights and diagnostic status.
521pub struct BufferChunks<'a> {
522 buffer_snapshot: Option<&'a BufferSnapshot>,
523 range: Range<usize>,
524 chunks: text::Chunks<'a>,
525 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
526 error_depth: usize,
527 warning_depth: usize,
528 information_depth: usize,
529 hint_depth: usize,
530 unnecessary_depth: usize,
531 underline: bool,
532 highlights: Option<BufferChunkHighlights<'a>>,
533}
534
535/// A chunk of a buffer's text, along with its syntax highlight and
536/// diagnostic status.
537#[derive(Clone, Debug, Default)]
538pub struct Chunk<'a> {
539 /// The text of the chunk.
540 pub text: &'a str,
541 /// The syntax highlighting style of the chunk.
542 pub syntax_highlight_id: Option<HighlightId>,
543 /// The highlight style that has been applied to this chunk in
544 /// the editor.
545 pub highlight_style: Option<HighlightStyle>,
546 /// The severity of diagnostic associated with this chunk, if any.
547 pub diagnostic_severity: Option<DiagnosticSeverity>,
548 /// A bitset of which characters are tabs in this string.
549 pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
551 pub chars: u128,
552 /// Whether this chunk of text is marked as unnecessary.
553 pub is_unnecessary: bool,
554 /// Whether this chunk of text was originally a tab character.
555 pub is_tab: bool,
556 /// Whether this chunk of text was originally an inlay.
557 pub is_inlay: bool,
558 /// Whether to underline the corresponding text range in the editor.
559 pub underline: bool,
560}
561
562/// A set of edits to a given version of a buffer, computed asynchronously.
563#[derive(Debug)]
564pub struct Diff {
565 pub base_version: clock::Global,
566 pub line_ending: LineEnding,
567 pub edits: Vec<(Range<usize>, Arc<str>)>,
568}
569
570#[derive(Debug, Clone, Copy)]
571pub(crate) struct DiagnosticEndpoint {
572 offset: usize,
573 is_start: bool,
574 underline: bool,
575 severity: DiagnosticSeverity,
576 is_unnecessary: bool,
577}
578
579/// A class of characters, used for characterizing a run of text.
580#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
581pub enum CharKind {
582 /// Whitespace.
583 Whitespace,
584 /// Punctuation.
585 Punctuation,
586 /// Word.
587 Word,
588}
589
590/// Context for character classification within a specific scope.
591#[derive(Copy, Clone, Eq, PartialEq, Debug)]
592pub enum CharScopeContext {
593 /// Character classification for completion queries.
594 ///
595 /// This context treats certain characters as word constituents that would
596 /// normally be considered punctuation, such as '-' in Tailwind classes
597 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
598 Completion,
599 /// Character classification for linked edits.
600 ///
601 /// This context handles characters that should be treated as part of
602 /// identifiers during linked editing operations, such as '.' in JSX
603 /// component names like `<Animated.View>`.
604 LinkedEdit,
605}
606
/// A runnable is a set of data about a region that could be resolved into a task.
608pub struct Runnable {
609 pub tags: SmallVec<[RunnableTag; 1]>,
610 pub language: Arc<Language>,
611 pub buffer: BufferId,
612}
613
614#[derive(Default, Clone, Debug)]
615pub struct HighlightedText {
616 pub text: SharedString,
617 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
618}
619
620#[derive(Default, Debug)]
621struct HighlightedTextBuilder {
622 pub text: String,
623 highlights: Vec<(Range<usize>, HighlightStyle)>,
624}
625
626impl HighlightedText {
627 pub fn from_buffer_range<T: ToOffset>(
628 range: Range<T>,
629 snapshot: &text::BufferSnapshot,
630 syntax_snapshot: &SyntaxSnapshot,
631 override_style: Option<HighlightStyle>,
632 syntax_theme: &SyntaxTheme,
633 ) -> Self {
634 let mut highlighted_text = HighlightedTextBuilder::default();
635 highlighted_text.add_text_from_buffer_range(
636 range,
637 snapshot,
638 syntax_snapshot,
639 override_style,
640 syntax_theme,
641 );
642 highlighted_text.build()
643 }
644
645 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
646 gpui::StyledText::new(self.text.clone())
647 .with_default_highlights(default_style, self.highlights.iter().cloned())
648 }
649
    /// Returns the first line, with its leading whitespace trimmed unless a
    /// highlight begins within it, along with a boolean indicating whether
    /// more lines follow.
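    ///
    /// For example, for the text "    let x = 1;\nlet y = 2;" with no
    /// highlights, this returns a preview containing "let x = 1;" and `true`.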
652 pub fn first_line_preview(self) -> (Self, bool) {
653 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
654 let first_line = &self.text[..newline_ix];
655
656 // Trim leading whitespace, unless an edit starts prior to it.
657 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
658 if let Some((first_highlight_range, _)) = self.highlights.first() {
659 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
660 }
661
662 let preview_text = &first_line[preview_start_ix..];
663 let preview_highlights = self
664 .highlights
665 .into_iter()
666 .skip_while(|(range, _)| range.end <= preview_start_ix)
667 .take_while(|(range, _)| range.start < newline_ix)
668 .filter_map(|(mut range, highlight)| {
669 range.start = range.start.saturating_sub(preview_start_ix);
670 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
671 if range.is_empty() {
672 None
673 } else {
674 Some((range, highlight))
675 }
676 });
677
678 let preview = Self {
679 text: SharedString::new(preview_text),
680 highlights: preview_highlights.collect(),
681 };
682
683 (preview, self.text.len() > newline_ix)
684 }
685}
686
687impl HighlightedTextBuilder {
688 pub fn build(self) -> HighlightedText {
689 HighlightedText {
690 text: self.text.into(),
691 highlights: self.highlights,
692 }
693 }
694
695 pub fn add_text_from_buffer_range<T: ToOffset>(
696 &mut self,
697 range: Range<T>,
698 snapshot: &text::BufferSnapshot,
699 syntax_snapshot: &SyntaxSnapshot,
700 override_style: Option<HighlightStyle>,
701 syntax_theme: &SyntaxTheme,
702 ) {
703 let range = range.to_offset(snapshot);
704 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
705 let start = self.text.len();
706 self.text.push_str(chunk.text);
707 let end = self.text.len();
708
709 if let Some(highlight_style) = chunk
710 .syntax_highlight_id
711 .and_then(|id| id.style(syntax_theme))
712 {
713 let highlight_style = override_style.map_or(highlight_style, |override_style| {
714 highlight_style.highlight(override_style)
715 });
716 self.highlights.push((start..end, highlight_style));
717 } else if let Some(override_style) = override_style {
718 self.highlights.push((start..end, override_style));
719 }
720 }
721 }
722
723 fn highlighted_chunks<'a>(
724 range: Range<usize>,
725 snapshot: &'a text::BufferSnapshot,
726 syntax_snapshot: &'a SyntaxSnapshot,
727 ) -> BufferChunks<'a> {
728 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
729 grammar
730 .highlights_config
731 .as_ref()
732 .map(|config| &config.query)
733 });
734
735 let highlight_maps = captures
736 .grammars()
737 .iter()
738 .map(|grammar| grammar.highlight_map())
739 .collect();
740
741 BufferChunks::new(
742 snapshot.as_rope(),
743 range,
744 Some((captures, highlight_maps)),
745 false,
746 None,
747 )
748 }
749}
750
751#[derive(Clone)]
752pub struct EditPreview {
753 old_snapshot: text::BufferSnapshot,
754 applied_edits_snapshot: text::BufferSnapshot,
755 syntax_snapshot: SyntaxSnapshot,
756}
757
758impl EditPreview {
759 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first.start.to_point(&self.old_snapshot);
764 let old_end = last.end.to_point(&self.old_snapshot);
765 let new_end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 let start = Point::new(start.row.saturating_sub(3), 0);
771 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
772 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
773
774 Some(unified_diff(
775 &self
776 .old_snapshot
777 .text_for_range(start..old_end)
778 .collect::<String>(),
779 &self
780 .applied_edits_snapshot
781 .text_for_range(start..new_end)
782 .collect::<String>(),
783 ))
784 }
785
786 pub fn highlight_edits(
787 &self,
788 current_snapshot: &BufferSnapshot,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 include_deletions: bool,
791 cx: &App,
792 ) -> HighlightedText {
793 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
794 return HighlightedText::default();
795 };
796
797 let mut highlighted_text = HighlightedTextBuilder::default();
798
799 let visible_range_in_preview_snapshot =
800 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
801 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
802
803 let insertion_highlight_style = HighlightStyle {
804 background_color: Some(cx.theme().status().created_background),
805 ..Default::default()
806 };
807 let deletion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().deleted_background),
809 ..Default::default()
810 };
811 let syntax_theme = cx.theme().syntax();
812
813 for (range, edit_text) in edits {
814 let edit_new_end_in_preview_snapshot = range
815 .end
816 .bias_right(&self.old_snapshot)
817 .to_offset(&self.applied_edits_snapshot);
818 let edit_start_in_preview_snapshot =
819 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
820
821 let unchanged_range_in_preview_snapshot =
822 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
823 if !unchanged_range_in_preview_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 unchanged_range_in_preview_snapshot,
826 &self.applied_edits_snapshot,
827 &self.syntax_snapshot,
828 None,
829 syntax_theme,
830 );
831 }
832
833 let range_in_current_snapshot = range.to_offset(current_snapshot);
834 if include_deletions && !range_in_current_snapshot.is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
839 Some(deletion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 if !edit_text.as_ref().is_empty() {
845 highlighted_text.add_text_from_buffer_range(
846 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
847 &self.applied_edits_snapshot,
848 &self.syntax_snapshot,
849 Some(insertion_highlight_style),
850 syntax_theme,
851 );
852 }
853
854 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
855 }
856
857 highlighted_text.add_text_from_buffer_range(
858 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
859 &self.applied_edits_snapshot,
860 &self.syntax_snapshot,
861 None,
862 syntax_theme,
863 );
864
865 highlighted_text.build()
866 }
867
868 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
869 cx.new(|cx| {
870 let mut buffer = Buffer::local_normalized(
871 self.applied_edits_snapshot.as_rope().clone(),
872 self.applied_edits_snapshot.line_ending(),
873 cx,
874 );
875 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
876 buffer
877 })
878 }
879
880 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
881 let (first, _) = edits.first()?;
882 let (last, _) = edits.last()?;
883
884 let start = first
885 .start
886 .bias_left(&self.old_snapshot)
887 .to_point(&self.applied_edits_snapshot);
888 let end = last
889 .end
890 .bias_right(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892
893 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
894 let range = Point::new(start.row, 0)
895 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
896
897 Some(range)
898 }
899}
900
901#[derive(Clone, Debug, PartialEq, Eq)]
902pub struct BracketMatch<T> {
903 pub open_range: Range<T>,
904 pub close_range: Range<T>,
905 pub newline_only: bool,
906 pub syntax_layer_depth: usize,
907 pub color_index: Option<usize>,
908}
909
910impl<T> BracketMatch<T> {
911 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
912 (self.open_range, self.close_range)
913 }
914}
915
916impl Buffer {
917 /// Create a new buffer with the given base text.
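    ///
    /// A minimal usage sketch, assuming an `App` context (`cx`) is in scope:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```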
918 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
919 Self::build(
920 TextBuffer::new(
921 ReplicaId::LOCAL,
922 cx.entity_id().as_non_zero_u64().into(),
923 base_text.into(),
924 ),
925 None,
926 Capability::ReadWrite,
927 )
928 }
929
930 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
931 pub fn local_normalized(
932 base_text_normalized: Rope,
933 line_ending: LineEnding,
934 cx: &Context<Self>,
935 ) -> Self {
936 Self::build(
937 TextBuffer::new_normalized(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 line_ending,
941 base_text_normalized,
942 ),
943 None,
944 Capability::ReadWrite,
945 )
946 }
947
948 /// Create a new buffer that is a replica of a remote buffer.
949 pub fn remote(
950 remote_id: BufferId,
951 replica_id: ReplicaId,
952 capability: Capability,
953 base_text: impl Into<String>,
954 ) -> Self {
955 Self::build(
956 TextBuffer::new(replica_id, remote_id, base_text.into()),
957 None,
958 capability,
959 )
960 }
961
962 /// Create a new buffer that is a replica of a remote buffer, populating its
963 /// state from the given protobuf message.
964 pub fn from_proto(
965 replica_id: ReplicaId,
966 capability: Capability,
967 message: proto::BufferState,
968 file: Option<Arc<dyn File>>,
969 ) -> Result<Self> {
970 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
971 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
972 let mut this = Self::build(buffer, file, capability);
973 this.text.set_line_ending(proto::deserialize_line_ending(
974 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
975 ));
976 this.saved_version = proto::deserialize_version(&message.saved_version);
977 this.saved_mtime = message.saved_mtime.map(|time| time.into());
978 Ok(this)
979 }
980
981 /// Serialize the buffer's state to a protobuf message.
982 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
983 proto::BufferState {
984 id: self.remote_id().into(),
985 file: self.file.as_ref().map(|f| f.to_proto(cx)),
986 base_text: self.base_text().to_string(),
987 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
988 saved_version: proto::serialize_version(&self.saved_version),
989 saved_mtime: self.saved_mtime.map(|time| time.into()),
990 }
991 }
992
993 /// Serialize as protobufs all of the changes to the buffer since the given version.
994 pub fn serialize_ops(
995 &self,
996 since: Option<clock::Global>,
997 cx: &App,
998 ) -> Task<Vec<proto::Operation>> {
999 let mut operations = Vec::new();
1000 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1001
1002 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1003 proto::serialize_operation(&Operation::UpdateSelections {
1004 selections: set.selections.clone(),
1005 lamport_timestamp: set.lamport_timestamp,
1006 line_mode: set.line_mode,
1007 cursor_shape: set.cursor_shape,
1008 })
1009 }));
1010
1011 for (server_id, diagnostics) in &self.diagnostics {
1012 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1013 lamport_timestamp: self.diagnostics_timestamp,
1014 server_id: *server_id,
1015 diagnostics: diagnostics.iter().cloned().collect(),
1016 }));
1017 }
1018
1019 for (server_id, completions) in &self.completion_triggers_per_language_server {
1020 operations.push(proto::serialize_operation(
1021 &Operation::UpdateCompletionTriggers {
1022 triggers: completions.iter().cloned().collect(),
1023 lamport_timestamp: self.completion_triggers_timestamp,
1024 server_id: *server_id,
1025 },
1026 ));
1027 }
1028
1029 let text_operations = self.text.operations().clone();
1030 cx.background_spawn(async move {
1031 let since = since.unwrap_or_default();
1032 operations.extend(
1033 text_operations
1034 .iter()
1035 .filter(|(_, op)| !since.observed(op.timestamp()))
1036 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1037 );
1038 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1039 operations
1040 })
1041 }
1042
1043 /// Assign a language to the buffer, returning the buffer.
1044 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1045 self.set_language_async(Some(language), cx);
1046 self
1047 }
1048
1049 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1050 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1051 self.set_language(Some(language), cx);
1052 self
1053 }
1054
1055 /// Returns the [`Capability`] of this buffer.
1056 pub fn capability(&self) -> Capability {
1057 self.capability
1058 }
1059
1060 /// Whether this buffer can only be read.
1061 pub fn read_only(&self) -> bool {
1062 self.capability == Capability::ReadOnly
1063 }
1064
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1066 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1067 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1068 let snapshot = buffer.snapshot();
1069 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1070 let tree_sitter_data = TreeSitterData::new(snapshot);
1071 Self {
1072 saved_mtime,
1073 tree_sitter_data: Arc::new(tree_sitter_data),
1074 saved_version: buffer.version(),
1075 preview_version: buffer.version(),
1076 reload_task: None,
1077 transaction_depth: 0,
1078 was_dirty_before_starting_transaction: None,
1079 has_unsaved_edits: Cell::new((buffer.version(), false)),
1080 text: buffer,
1081 branch_state: None,
1082 file,
1083 capability,
1084 syntax_map,
1085 reparse: None,
1086 non_text_state_update_count: 0,
1087 sync_parse_timeout: Duration::from_millis(1),
1088 parse_status: watch::channel(ParseStatus::Idle),
1089 autoindent_requests: Default::default(),
1090 wait_for_autoindent_txs: Default::default(),
1091 pending_autoindent: Default::default(),
1092 language: None,
1093 remote_selections: Default::default(),
1094 diagnostics: Default::default(),
1095 diagnostics_timestamp: Lamport::MIN,
1096 completion_triggers: Default::default(),
1097 completion_triggers_per_language_server: Default::default(),
1098 completion_triggers_timestamp: Lamport::MIN,
1099 deferred_ops: OperationQueue::new(),
1100 has_conflict: false,
1101 change_bits: Default::default(),
1102 _subscriptions: Vec::new(),
1103 }
1104 }
1105
1106 pub fn build_snapshot(
1107 text: Rope,
1108 language: Option<Arc<Language>>,
1109 language_registry: Option<Arc<LanguageRegistry>>,
1110 cx: &mut App,
1111 ) -> impl Future<Output = BufferSnapshot> + use<> {
1112 let entity_id = cx.reserve_entity::<Self>().entity_id();
1113 let buffer_id = entity_id.as_non_zero_u64().into();
1114 async move {
1115 let text =
1116 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1117 .snapshot();
1118 let mut syntax = SyntaxMap::new(&text).snapshot();
1119 if let Some(language) = language.clone() {
1120 let language_registry = language_registry.clone();
1121 syntax.reparse(&text, language_registry, language);
1122 }
1123 let tree_sitter_data = TreeSitterData::new(text.clone());
1124 BufferSnapshot {
1125 text,
1126 syntax,
1127 file: None,
1128 diagnostics: Default::default(),
1129 remote_selections: Default::default(),
1130 tree_sitter_data: Arc::new(tree_sitter_data),
1131 language,
1132 non_text_state_update_count: 0,
1133 }
1134 }
1135 }
1136
1137 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1138 let entity_id = cx.reserve_entity::<Self>().entity_id();
1139 let buffer_id = entity_id.as_non_zero_u64().into();
1140 let text = TextBuffer::new_normalized(
1141 ReplicaId::LOCAL,
1142 buffer_id,
1143 Default::default(),
1144 Rope::new(),
1145 )
1146 .snapshot();
1147 let syntax = SyntaxMap::new(&text).snapshot();
1148 let tree_sitter_data = TreeSitterData::new(text.clone());
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(tree_sitter_data),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language: None,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 #[cfg(any(test, feature = "test-support"))]
1162 pub fn build_snapshot_sync(
1163 text: Rope,
1164 language: Option<Arc<Language>>,
1165 language_registry: Option<Arc<LanguageRegistry>>,
1166 cx: &mut App,
1167 ) -> BufferSnapshot {
1168 let entity_id = cx.reserve_entity::<Self>().entity_id();
1169 let buffer_id = entity_id.as_non_zero_u64().into();
1170 let text =
1171 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1172 .snapshot();
1173 let mut syntax = SyntaxMap::new(&text).snapshot();
1174 if let Some(language) = language.clone() {
1175 syntax.reparse(&text, language_registry, language);
1176 }
1177 let tree_sitter_data = TreeSitterData::new(text.clone());
1178 BufferSnapshot {
1179 text,
1180 syntax,
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 file: None,
1183 diagnostics: Default::default(),
1184 remote_selections: Default::default(),
1185 language,
1186 non_text_state_update_count: 0,
1187 }
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's current state. This is computationally
1191 /// cheap, and allows reading from the buffer on a background thread.
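    ///
    /// A minimal sketch of reading from a snapshot on a background task,
    /// assuming a `buffer` entity and an `App` context (`cx`) are in scope:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot can be read freely off the main thread.
    ///     let _line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```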
1192 pub fn snapshot(&self) -> BufferSnapshot {
1193 let text = self.text.snapshot();
1194 let mut syntax_map = self.syntax_map.lock();
1195 syntax_map.interpolate(&text);
1196 let syntax = syntax_map.snapshot();
1197
1198 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1199 Arc::new(TreeSitterData::new(text.clone()))
1200 } else {
1201 self.tree_sitter_data.clone()
1202 };
1203
1204 BufferSnapshot {
1205 text,
1206 syntax,
1207 tree_sitter_data,
1208 file: self.file.clone(),
1209 remote_selections: self.remote_selections.clone(),
1210 diagnostics: self.diagnostics.clone(),
1211 language: self.language.clone(),
1212 non_text_state_update_count: self.non_text_state_update_count,
1213 }
1214 }
1215
1216 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1217 let this = cx.entity();
1218 cx.new(|cx| {
1219 let mut branch = Self {
1220 branch_state: Some(BufferBranchState {
1221 base_buffer: this.clone(),
1222 merged_operations: Default::default(),
1223 }),
1224 language: self.language.clone(),
1225 has_conflict: self.has_conflict,
1226 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1227 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1228 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1229 };
1230 if let Some(language_registry) = self.language_registry() {
1231 branch.set_language_registry(language_registry);
1232 }
1233
1234 // Reparse the branch buffer so that we get syntax highlighting immediately.
1235 branch.reparse(cx, true);
1236
1237 branch
1238 })
1239 }
1240
1241 pub fn preview_edits(
1242 &self,
1243 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1244 cx: &App,
1245 ) -> Task<EditPreview> {
1246 let registry = self.language_registry();
1247 let language = self.language().cloned();
1248 let old_snapshot = self.text.snapshot();
1249 let mut branch_buffer = self.text.branch();
1250 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1251 cx.background_spawn(async move {
1252 if !edits.is_empty() {
1253 if let Some(language) = language.clone() {
1254 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1255 }
1256
1257 branch_buffer.edit(edits.iter().cloned());
1258 let snapshot = branch_buffer.snapshot();
1259 syntax_snapshot.interpolate(&snapshot);
1260
1261 if let Some(language) = language {
1262 syntax_snapshot.reparse(&snapshot, registry, language);
1263 }
1264 }
1265 EditPreview {
1266 old_snapshot,
1267 applied_edits_snapshot: branch_buffer.snapshot(),
1268 syntax_snapshot,
1269 }
1270 })
1271 }
1272
1273 /// Applies all of the changes in this buffer that intersect any of the
1274 /// given `ranges` to its base buffer.
1275 ///
1276 /// If `ranges` is empty, then all changes will be applied. This buffer must
1277 /// be a branch buffer to call this method.
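    ///
    /// A minimal sketch, assuming `branch` is a branch buffer created with
    /// [`Buffer::branch`] and `cx` is a mutable `App` context:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies all of the branch's changes.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```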
1278 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1279 let Some(base_buffer) = self.base_buffer() else {
1280 debug_panic!("not a branch buffer");
1281 return;
1282 };
1283
1284 let mut ranges = if ranges.is_empty() {
1285 &[0..usize::MAX]
1286 } else {
1287 ranges.as_slice()
1288 }
1289 .iter()
1290 .peekable();
1291
1292 let mut edits = Vec::new();
1293 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1294 let mut is_included = false;
1295 while let Some(range) = ranges.peek() {
1296 if range.end < edit.new.start {
1297 ranges.next().unwrap();
1298 } else {
1299 if range.start <= edit.new.end {
1300 is_included = true;
1301 }
1302 break;
1303 }
1304 }
1305
1306 if is_included {
1307 edits.push((
1308 edit.old.clone(),
1309 self.text_for_range(edit.new.clone()).collect::<String>(),
1310 ));
1311 }
1312 }
1313
1314 let operation = base_buffer.update(cx, |base_buffer, cx| {
1315 // cx.emit(BufferEvent::DiffBaseChanged);
1316 base_buffer.edit(edits, None, cx)
1317 });
1318
1319 if let Some(operation) = operation
1320 && let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 {
1324 merged_operations.push(operation);
1325 }
1326 }
1327
1328 fn on_base_buffer_event(
1329 &mut self,
1330 _: Entity<Buffer>,
1331 event: &BufferEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 let BufferEvent::Operation { operation, .. } = event else {
1335 return;
1336 };
1337 let Some(BufferBranchState {
1338 merged_operations, ..
1339 }) = &mut self.branch_state
1340 else {
1341 return;
1342 };
1343
1344 let mut operation_to_undo = None;
1345 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1346 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1347 {
1348 merged_operations.remove(ix);
1349 operation_to_undo = Some(operation.timestamp);
1350 }
1351
1352 self.apply_ops([operation.clone()], cx);
1353
1354 if let Some(timestamp) = operation_to_undo {
1355 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1356 self.undo_operations(counts, cx);
1357 }
1358 }
1359
1360 #[cfg(test)]
1361 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1362 &self.text
1363 }
1364
1365 /// Retrieve a snapshot of the buffer's raw text, without any
1366 /// language-related state like the syntax tree or diagnostics.
1367 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1368 self.text.snapshot()
1369 }
1370
1371 /// The file associated with the buffer, if any.
1372 pub fn file(&self) -> Option<&Arc<dyn File>> {
1373 self.file.as_ref()
1374 }
1375
1376 /// The version of the buffer that was last saved or reloaded from disk.
1377 pub fn saved_version(&self) -> &clock::Global {
1378 &self.saved_version
1379 }
1380
1381 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1382 pub fn saved_mtime(&self) -> Option<MTime> {
1383 self.saved_mtime
1384 }
1385
1386 /// Assign a language to the buffer.
1387 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1388 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1389 }
1390
1391 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1392 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1393 self.set_language_(language, true, cx);
1394 }
1395
1396 fn set_language_(
1397 &mut self,
1398 language: Option<Arc<Language>>,
1399 may_block: bool,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.non_text_state_update_count += 1;
1403 self.syntax_map.lock().clear(&self.text);
1404 let old_language = std::mem::replace(&mut self.language, language);
1405 self.was_changed();
1406 self.reparse(cx, may_block);
1407 let has_fresh_language =
1408 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1409 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1410 }
1411
1412 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1413 /// other languages if parts of the buffer are written in different languages.
1414 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1415 self.syntax_map
1416 .lock()
1417 .set_language_registry(language_registry);
1418 }
1419
1420 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1421 self.syntax_map.lock().language_registry()
1422 }
1423
1424 /// Assign the line ending type to the buffer.
1425 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1426 self.text.set_line_ending(line_ending);
1427
1428 let lamport_timestamp = self.text.lamport_clock.tick();
1429 self.send_operation(
1430 Operation::UpdateLineEnding {
1431 line_ending,
1432 lamport_timestamp,
1433 },
1434 true,
1435 cx,
1436 );
1437 }
1438
1439 /// Assign the buffer a new [`Capability`].
1440 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1441 if self.capability != capability {
1442 self.capability = capability;
1443 cx.emit(BufferEvent::CapabilityChanged)
1444 }
1445 }
1446
1447 /// This method is called to signal that the buffer has been saved.
1448 pub fn did_save(
1449 &mut self,
1450 version: clock::Global,
1451 mtime: Option<MTime>,
1452 cx: &mut Context<Self>,
1453 ) {
1454 self.saved_version = version.clone();
1455 self.has_unsaved_edits.set((version, false));
1456 self.has_conflict = false;
1457 self.saved_mtime = mtime;
1458 self.was_changed();
1459 cx.emit(BufferEvent::Saved);
1460 cx.notify();
1461 }
1462
1463 /// Reloads the contents of the buffer from disk.
1464 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1465 let (tx, rx) = futures::channel::oneshot::channel();
1466 let prev_version = self.text.version();
1467 self.reload_task = Some(cx.spawn(async move |this, cx| {
1468 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1469 let file = this.file.as_ref()?.as_local()?;
1470
1471 Some((file.disk_state().mtime(), file.load(cx)))
1472 })?
1473 else {
1474 return Ok(());
1475 };
1476
1477 let new_text = new_text.await?;
1478 let diff = this
1479 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1480 .await;
1481 this.update(cx, |this, cx| {
1482 if this.version() == diff.base_version {
1483 this.finalize_last_transaction();
1484 this.apply_diff(diff, cx);
1485 tx.send(this.finalize_last_transaction().cloned()).ok();
1486 this.has_conflict = false;
1487 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1488 } else {
1489 if !diff.edits.is_empty()
1490 || this
1491 .edits_since::<usize>(&diff.base_version)
1492 .next()
1493 .is_some()
1494 {
1495 this.has_conflict = true;
1496 }
1497
1498 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1499 }
1500
1501 this.reload_task.take();
1502 })
1503 }));
1504 rx
1505 }
1506
1507 /// This method is called to signal that the buffer has been reloaded.
1508 pub fn did_reload(
1509 &mut self,
1510 version: clock::Global,
1511 line_ending: LineEnding,
1512 mtime: Option<MTime>,
1513 cx: &mut Context<Self>,
1514 ) {
1515 self.saved_version = version;
1516 self.has_unsaved_edits
1517 .set((self.saved_version.clone(), false));
1518 self.text.set_line_ending(line_ending);
1519 self.saved_mtime = mtime;
1520 cx.emit(BufferEvent::Reloaded);
1521 cx.notify();
1522 }
1523
1524 /// Updates the [`File`] backing this buffer. This should be called when
1525 /// the file has changed or has been deleted.
1526 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1527 let was_dirty = self.is_dirty();
1528 let mut file_changed = false;
1529
1530 if let Some(old_file) = self.file.as_ref() {
1531 if new_file.path() != old_file.path() {
1532 file_changed = true;
1533 }
1534
1535 let old_state = old_file.disk_state();
1536 let new_state = new_file.disk_state();
1537 if old_state != new_state {
1538 file_changed = true;
1539 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1540 cx.emit(BufferEvent::ReloadNeeded)
1541 }
1542 }
1543 } else {
1544 file_changed = true;
1545 };
1546
1547 self.file = Some(new_file);
1548 if file_changed {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 if was_dirty != self.is_dirty() {
1552 cx.emit(BufferEvent::DirtyChanged);
1553 }
1554 cx.emit(BufferEvent::FileHandleChanged);
1555 cx.notify();
1556 }
1557 }
1558
1559 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1560 Some(self.branch_state.as_ref()?.base_buffer.clone())
1561 }
1562
1563 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1564 pub fn language(&self) -> Option<&Arc<Language>> {
1565 self.language.as_ref()
1566 }
1567
1568 /// Returns the [`Language`] at the given location.
1569 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1570 let offset = position.to_offset(self);
1571 let mut is_first = true;
1572 let start_anchor = self.anchor_before(offset);
1573 let end_anchor = self.anchor_after(offset);
1574 self.syntax_map
1575 .lock()
1576 .layers_for_range(offset..offset, &self.text, false)
1577 .filter(|layer| {
1578 if is_first {
1579 is_first = false;
1580 return true;
1581 }
1582
1583 layer
1584 .included_sub_ranges
1585 .map(|sub_ranges| {
1586 sub_ranges.iter().any(|sub_range| {
1587 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1588 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1589 !is_before_start && !is_after_end
1590 })
1591 })
1592 .unwrap_or(true)
1593 })
1594 .last()
1595 .map(|info| info.language.clone())
1596 .or_else(|| self.language.clone())
1597 }
1598
1599 /// Returns each [`Language`] for the active syntax layers at the given location.
1600 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1601 let offset = position.to_offset(self);
1602 let mut languages: Vec<Arc<Language>> = self
1603 .syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .map(|info| info.language.clone())
1607 .collect();
1608
1609 if languages.is_empty()
1610 && let Some(buffer_language) = self.language()
1611 {
1612 languages.push(buffer_language.clone());
1613 }
1614
1615 languages
1616 }
1617
1618 /// An integer version number that accounts for all updates besides
1619 /// the buffer's text itself (which is versioned via a version vector).
1620 pub fn non_text_state_update_count(&self) -> usize {
1621 self.non_text_state_update_count
1622 }
1623
1624 /// Whether the buffer is being parsed in the background.
1625 #[cfg(any(test, feature = "test-support"))]
1626 pub fn is_parsing(&self) -> bool {
1627 self.reparse.is_some()
1628 }
1629
1630 /// Indicates whether the buffer contains any regions that may be
1631 /// written in a language that hasn't been loaded yet.
1632 pub fn contains_unknown_injections(&self) -> bool {
1633 self.syntax_map.lock().contains_unknown_injections()
1634 }
1635
1636 #[cfg(any(test, feature = "test-support"))]
1637 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1638 self.sync_parse_timeout = timeout;
1639 }
1640
1641 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1642 match Arc::get_mut(&mut self.tree_sitter_data) {
1643 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1644 None => {
1645 let tree_sitter_data = TreeSitterData::new(snapshot);
1646 self.tree_sitter_data = Arc::new(tree_sitter_data)
1647 }
1648 }
1649 }
1650
1651 /// Called after an edit to synchronize the buffer's main parse tree with
1652 /// the buffer's new underlying state.
1653 ///
1654 /// Locks the syntax map and interpolates the edits since the last reparse
1655 /// into the foreground syntax tree.
1656 ///
1657 /// Then takes a stable snapshot of the syntax map before unlocking it.
1658 /// The snapshot with the interpolated edits is sent to a background thread,
1659 /// where we ask Tree-sitter to perform an incremental parse.
1660 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to 1ms waiting for the parse to complete. If it completes
    /// within that time, we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call
    /// back into the main thread and assign it as the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background
    /// parse, we initiate an additional reparse recursively. To avoid
    /// concurrent parses for the same buffer, we only initiate a new parse if
    /// we are not already parsing in the background.
1674 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1675 if self.text.version() != *self.tree_sitter_data.version() {
1676 self.invalidate_tree_sitter_data(self.text.snapshot());
1677 }
1678 if self.reparse.is_some() {
1679 return;
1680 }
1681 let language = if let Some(language) = self.language.clone() {
1682 language
1683 } else {
1684 return;
1685 };
1686
1687 let text = self.text_snapshot();
1688 let parsed_version = self.version();
1689
1690 let mut syntax_map = self.syntax_map.lock();
1691 syntax_map.interpolate(&text);
1692 let language_registry = syntax_map.language_registry();
1693 let mut syntax_snapshot = syntax_map.snapshot();
1694 drop(syntax_map);
1695
1696 let parse_task = cx.background_spawn({
1697 let language = language.clone();
1698 let language_registry = language_registry.clone();
1699 async move {
1700 syntax_snapshot.reparse(&text, language_registry, language);
1701 syntax_snapshot
1702 }
1703 });
1704
1705 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1706 if may_block {
1707 match cx
1708 .background_executor()
1709 .block_with_timeout(self.sync_parse_timeout, parse_task)
1710 {
1711 Ok(new_syntax_snapshot) => {
1712 self.did_finish_parsing(new_syntax_snapshot, cx);
1713 self.reparse = None;
1714 }
1715 Err(parse_task) => {
1716 self.reparse = Some(cx.spawn(async move |this, cx| {
1717 let new_syntax_map = cx.background_spawn(parse_task).await;
1718 this.update(cx, move |this, cx| {
1719 let grammar_changed = || {
1720 this.language.as_ref().is_none_or(|current_language| {
1721 !Arc::ptr_eq(&language, current_language)
1722 })
1723 };
1724 let language_registry_changed = || {
1725 new_syntax_map.contains_unknown_injections()
1726 && language_registry.is_some_and(|registry| {
1727 registry.version()
1728 != new_syntax_map.language_registry_version()
1729 })
1730 };
1731 let parse_again = this.version.changed_since(&parsed_version)
1732 || language_registry_changed()
1733 || grammar_changed();
1734 this.did_finish_parsing(new_syntax_map, cx);
1735 this.reparse = None;
1736 if parse_again {
1737 this.reparse(cx, false);
1738 }
1739 })
1740 .ok();
1741 }));
1742 }
1743 }
1744 } else {
1745 self.reparse = Some(cx.spawn(async move |this, cx| {
1746 let new_syntax_map = cx.background_spawn(parse_task).await;
1747 this.update(cx, move |this, cx| {
1748 let grammar_changed = || {
1749 this.language.as_ref().is_none_or(|current_language| {
1750 !Arc::ptr_eq(&language, current_language)
1751 })
1752 };
1753 let language_registry_changed = || {
1754 new_syntax_map.contains_unknown_injections()
1755 && language_registry.is_some_and(|registry| {
1756 registry.version() != new_syntax_map.language_registry_version()
1757 })
1758 };
1759 let parse_again = this.version.changed_since(&parsed_version)
1760 || language_registry_changed()
1761 || grammar_changed();
1762 this.did_finish_parsing(new_syntax_map, cx);
1763 this.reparse = None;
1764 if parse_again {
1765 this.reparse(cx, false);
1766 }
1767 })
1768 .ok();
1769 }));
1770 }
1771 }
1772
1773 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1774 self.was_changed();
1775 self.non_text_state_update_count += 1;
1776 self.syntax_map.lock().did_parse(syntax_snapshot);
1777 self.request_autoindent(cx);
1778 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1779 self.invalidate_tree_sitter_data(self.text.snapshot());
1780 cx.emit(BufferEvent::Reparsed);
1781 cx.notify();
1782 }
1783
1784 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1785 self.parse_status.1.clone()
1786 }
1787
    /// Waits until the buffer is no longer parsing.
1789 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1790 let mut parse_status = self.parse_status();
1791 async move {
1792 while *parse_status.borrow() != ParseStatus::Idle {
1793 if parse_status.changed().await.is_err() {
1794 break;
1795 }
1796 }
1797 }
1798 }
1799
1800 /// Assign to the buffer a set of diagnostics created by a given language server.
1801 pub fn update_diagnostics(
1802 &mut self,
1803 server_id: LanguageServerId,
1804 diagnostics: DiagnosticSet,
1805 cx: &mut Context<Self>,
1806 ) {
1807 let lamport_timestamp = self.text.lamport_clock.tick();
1808 let op = Operation::UpdateDiagnostics {
1809 server_id,
1810 diagnostics: diagnostics.iter().cloned().collect(),
1811 lamport_timestamp,
1812 };
1813
1814 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1815 self.send_operation(op, true, cx);
1816 }
1817
1818 pub fn buffer_diagnostics(
1819 &self,
1820 for_server: Option<LanguageServerId>,
1821 ) -> Vec<&DiagnosticEntry<Anchor>> {
1822 match for_server {
1823 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1824 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1825 Err(_) => Vec::new(),
1826 },
1827 None => self
1828 .diagnostics
1829 .iter()
1830 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1831 .collect(),
1832 }
1833 }
1834
1835 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1836 if let Some(indent_sizes) = self.compute_autoindents() {
1837 let indent_sizes = cx.background_spawn(indent_sizes);
1838 match cx
1839 .background_executor()
1840 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1841 {
1842 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1843 Err(indent_sizes) => {
1844 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1845 let indent_sizes = indent_sizes.await;
1846 this.update(cx, |this, cx| {
1847 this.apply_autoindents(indent_sizes, cx);
1848 })
1849 .ok();
1850 }));
1851 }
1852 }
1853 } else {
1854 self.autoindent_requests.clear();
1855 for tx in self.wait_for_autoindent_txs.drain(..) {
1856 tx.send(()).ok();
1857 }
1858 }
1859 }
1860
1861 fn compute_autoindents(
1862 &self,
1863 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1864 let max_rows_between_yields = 100;
1865 let snapshot = self.snapshot();
1866 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1867 return None;
1868 }
1869
1870 let autoindent_requests = self.autoindent_requests.clone();
1871 Some(async move {
1872 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1873 for request in autoindent_requests {
1874 // Resolve each edited range to its row in the current buffer and in the
1875 // buffer before this batch of edits.
1876 let mut row_ranges = Vec::new();
1877 let mut old_to_new_rows = BTreeMap::new();
1878 let mut language_indent_sizes_by_new_row = Vec::new();
1879 for entry in &request.entries {
1880 let position = entry.range.start;
1881 let new_row = position.to_point(&snapshot).row;
1882 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1883 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1884
1885 if !entry.first_line_is_new {
1886 let old_row = position.to_point(&request.before_edit).row;
1887 old_to_new_rows.insert(old_row, new_row);
1888 }
1889 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1890 }
1891
1892 // Build a map containing the suggested indentation for each of the edited lines
1893 // with respect to the state of the buffer before these edits. This map is keyed
1894 // by the rows for these lines in the current state of the buffer.
1895 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1896 let old_edited_ranges =
1897 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1898 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1899 let mut language_indent_size = IndentSize::default();
1900 for old_edited_range in old_edited_ranges {
1901 let suggestions = request
1902 .before_edit
1903 .suggest_autoindents(old_edited_range.clone())
1904 .into_iter()
1905 .flatten();
1906 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1907 if let Some(suggestion) = suggestion {
1908 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1909
1910 // Find the indent size based on the language for this row.
1911 while let Some((row, size)) = language_indent_sizes.peek() {
1912 if *row > new_row {
1913 break;
1914 }
1915 language_indent_size = *size;
1916 language_indent_sizes.next();
1917 }
1918
1919 let suggested_indent = old_to_new_rows
1920 .get(&suggestion.basis_row)
1921 .and_then(|from_row| {
1922 Some(old_suggestions.get(from_row).copied()?.0)
1923 })
1924 .unwrap_or_else(|| {
1925 request
1926 .before_edit
1927 .indent_size_for_line(suggestion.basis_row)
1928 })
1929 .with_delta(suggestion.delta, language_indent_size);
1930 old_suggestions
1931 .insert(new_row, (suggested_indent, suggestion.within_error));
1932 }
1933 }
1934 yield_now().await;
1935 }
1936
1937 // Compute new suggestions for each line, but only include them in the result
1938 // if they differ from the old suggestion for that line.
1939 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1940 let mut language_indent_size = IndentSize::default();
1941 for (row_range, original_indent_column) in row_ranges {
1942 let new_edited_row_range = if request.is_block_mode {
1943 row_range.start..row_range.start + 1
1944 } else {
1945 row_range.clone()
1946 };
1947
1948 let suggestions = snapshot
1949 .suggest_autoindents(new_edited_row_range.clone())
1950 .into_iter()
1951 .flatten();
1952 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1953 if let Some(suggestion) = suggestion {
1954 // Find the indent size based on the language for this row.
1955 while let Some((row, size)) = language_indent_sizes.peek() {
1956 if *row > new_row {
1957 break;
1958 }
1959 language_indent_size = *size;
1960 language_indent_sizes.next();
1961 }
1962
1963 let suggested_indent = indent_sizes
1964 .get(&suggestion.basis_row)
1965 .copied()
1966 .map(|e| e.0)
1967 .unwrap_or_else(|| {
1968 snapshot.indent_size_for_line(suggestion.basis_row)
1969 })
1970 .with_delta(suggestion.delta, language_indent_size);
1971
1972 if old_suggestions.get(&new_row).is_none_or(
1973 |(old_indentation, was_within_error)| {
1974 suggested_indent != *old_indentation
1975 && (!suggestion.within_error || *was_within_error)
1976 },
1977 ) {
1978 indent_sizes.insert(
1979 new_row,
1980 (suggested_indent, request.ignore_empty_lines),
1981 );
1982 }
1983 }
1984 }
1985
1986 if let (true, Some(original_indent_column)) =
1987 (request.is_block_mode, original_indent_column)
1988 {
1989 let new_indent =
1990 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1991 *indent
1992 } else {
1993 snapshot.indent_size_for_line(row_range.start)
1994 };
1995 let delta = new_indent.len as i64 - original_indent_column as i64;
1996 if delta != 0 {
1997 for row in row_range.skip(1) {
1998 indent_sizes.entry(row).or_insert_with(|| {
1999 let mut size = snapshot.indent_size_for_line(row);
2000 if size.kind == new_indent.kind {
2001 match delta.cmp(&0) {
2002 Ordering::Greater => size.len += delta as u32,
2003 Ordering::Less => {
2004 size.len = size.len.saturating_sub(-delta as u32)
2005 }
2006 Ordering::Equal => {}
2007 }
2008 }
2009 (size, request.ignore_empty_lines)
2010 });
2011 }
2012 }
2013 }
2014
2015 yield_now().await;
2016 }
2017 }
2018
2019 indent_sizes
2020 .into_iter()
2021 .filter_map(|(row, (indent, ignore_empty_lines))| {
2022 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2023 None
2024 } else {
2025 Some((row, indent))
2026 }
2027 })
2028 .collect()
2029 })
2030 }
2031
2032 fn apply_autoindents(
2033 &mut self,
2034 indent_sizes: BTreeMap<u32, IndentSize>,
2035 cx: &mut Context<Self>,
2036 ) {
2037 self.autoindent_requests.clear();
2038 for tx in self.wait_for_autoindent_txs.drain(..) {
2039 tx.send(()).ok();
2040 }
2041
2042 let edits: Vec<_> = indent_sizes
2043 .into_iter()
2044 .filter_map(|(row, indent_size)| {
2045 let current_size = indent_size_for_line(self, row);
2046 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2047 })
2048 .collect();
2049
2050 let preserve_preview = self.preserve_preview();
2051 self.edit(edits, None, cx);
2052 if preserve_preview {
2053 self.refresh_preview();
2054 }
2055 }
2056
2057 /// Create a minimal edit that will cause the given row to be indented
2058 /// with the given size. After applying this edit, the length of the line
2059 /// will always be at least `new_size.len`.
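    ///
    /// A minimal sketch of the returned edit (illustrative only):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces
    /// // at the start of that line.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```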
2060 pub fn edit_for_indent_size_adjustment(
2061 row: u32,
2062 current_size: IndentSize,
2063 new_size: IndentSize,
2064 ) -> Option<(Range<Point>, String)> {
2065 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2067 Ordering::Greater => {
2068 let point = Point::new(row, 0);
2069 Some((
2070 point..point,
2071 iter::repeat(new_size.char())
2072 .take((new_size.len - current_size.len) as usize)
2073 .collect::<String>(),
2074 ))
2075 }
2076
2077 Ordering::Less => Some((
2078 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2079 String::new(),
2080 )),
2081
2082 Ordering::Equal => None,
2083 }
2084 } else {
2085 Some((
2086 Point::new(row, 0)..Point::new(row, current_size.len),
2087 iter::repeat(new_size.char())
2088 .take(new_size.len as usize)
2089 .collect::<String>(),
2090 ))
2091 }
2092 }
2093
2094 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2095 /// and the given new text.
2096 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2097 let old_text = self.as_rope().clone();
2098 let base_version = self.version();
2099 cx.background_executor()
2100 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2101 let old_text = old_text.to_string();
2102 let line_ending = LineEnding::detect(&new_text);
2103 LineEnding::normalize(&mut new_text);
2104 let edits = text_diff(&old_text, &new_text);
2105 Diff {
2106 base_version,
2107 line_ending,
2108 edits,
2109 }
2110 })
2111 }
2112
2113 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2115 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2116 let old_text = self.as_rope().clone();
2117 let line_ending = self.line_ending();
2118 let base_version = self.version();
2119 cx.background_spawn(async move {
2120 let ranges = trailing_whitespace_ranges(&old_text);
2121 let empty = Arc::<str>::from("");
2122 Diff {
2123 base_version,
2124 line_ending,
2125 edits: ranges
2126 .into_iter()
2127 .map(|range| (range, empty.clone()))
2128 .collect(),
2129 }
2130 })
2131 }
2132
    /// Ensures that the buffer ends with a single newline character and no
    /// other trailing whitespace. Has no effect if the buffer is empty.
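    ///
    /// A minimal sketch (illustrative only; assumes `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n\n  ", cx);
    /// buffer.ensure_final_newline(cx);
    /// // Trailing blank lines and spaces collapse into a single newline.
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```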
2135 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2136 let len = self.len();
2137 if len == 0 {
2138 return;
2139 }
2140 let mut offset = len;
2141 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2142 let non_whitespace_len = chunk
2143 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2144 .len();
2145 offset -= chunk.len();
2146 offset += non_whitespace_len;
2147 if non_whitespace_len != 0 {
2148 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2149 return;
2150 }
2151 break;
2152 }
2153 }
2154 self.edit([(offset..len, "\n")], None, cx);
2155 }
2156
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
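    ///
    /// A minimal sketch of the diff-and-apply flow (illustrative only; assumes
    /// `buffer: &mut Buffer`, `cx: &mut Context<Buffer>`, and `new_text: String`,
    /// and that the task returned by `diff` is awaited before `apply_diff` runs):
    ///
    /// ```ignore
    /// let diff_task = buffer.diff(new_text, cx); // computed on a background thread
    /// // ... once `diff_task` resolves to `diff` on the foreground ...
    /// let transaction = buffer.apply_diff(diff, cx); // conflicting hunks are dropped
    /// ```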
2160 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2161 let snapshot = self.snapshot();
2162 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2163 let mut delta = 0;
2164 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2165 while let Some(edit_since) = edits_since.peek() {
2166 // If the edit occurs after a diff hunk, then it does not
2167 // affect that hunk.
2168 if edit_since.old.start > range.end {
2169 break;
2170 }
2171 // If the edit precedes the diff hunk, then adjust the hunk
2172 // to reflect the edit.
2173 else if edit_since.old.end < range.start {
2174 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2175 edits_since.next();
2176 }
2177 // If the edit intersects a diff hunk, then discard that hunk.
2178 else {
2179 return None;
2180 }
2181 }
2182
2183 let start = (range.start as i64 + delta) as usize;
2184 let end = (range.end as i64 + delta) as usize;
2185 Some((start..end, new_text))
2186 });
2187
2188 self.start_transaction();
2189 self.text.set_line_ending(diff.line_ending);
2190 self.edit(adjusted_edits, None, cx);
2191 self.end_transaction(cx)
2192 }
2193
2194 pub fn has_unsaved_edits(&self) -> bool {
2195 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2196
2197 if last_version == self.version {
2198 self.has_unsaved_edits
2199 .set((last_version, has_unsaved_edits));
2200 return has_unsaved_edits;
2201 }
2202
2203 let has_edits = self.has_edits_since(&self.saved_version);
2204 self.has_unsaved_edits
2205 .set((self.version.clone(), has_edits));
2206 has_edits
2207 }
2208
2209 /// Checks if the buffer has unsaved changes.
2210 pub fn is_dirty(&self) -> bool {
2211 if self.capability == Capability::ReadOnly {
2212 return false;
2213 }
2214 if self.has_conflict {
2215 return true;
2216 }
2217 match self.file.as_ref().map(|f| f.disk_state()) {
2218 Some(DiskState::New) | Some(DiskState::Deleted) => {
2219 !self.is_empty() && self.has_unsaved_edits()
2220 }
2221 _ => self.has_unsaved_edits(),
2222 }
2223 }
2224
2225 /// Marks the buffer as having a conflict regardless of current buffer state.
2226 pub fn set_conflict(&mut self) {
2227 self.has_conflict = true;
2228 }
2229
2230 /// Checks if the buffer and its file have both changed since the buffer
2231 /// was last saved or reloaded.
2232 pub fn has_conflict(&self) -> bool {
2233 if self.has_conflict {
2234 return true;
2235 }
2236 let Some(file) = self.file.as_ref() else {
2237 return false;
2238 };
2239 match file.disk_state() {
2240 DiskState::New => false,
2241 DiskState::Present { mtime } => match self.saved_mtime {
2242 Some(saved_mtime) => {
2243 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2244 }
2245 None => true,
2246 },
2247 DiskState::Deleted => false,
2248 }
2249 }
2250
2251 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2252 pub fn subscribe(&mut self) -> Subscription<usize> {
2253 self.text.subscribe()
2254 }
2255
2256 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2257 ///
2258 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
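    ///
    /// A minimal sketch (illustrative only; assumes `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&bit));
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// assert!(bit.get()); // set synchronously by the edit
    /// ```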
2260 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2261 if let Err(ix) = self
2262 .change_bits
2263 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2264 {
2265 self.change_bits.insert(ix, bit);
2266 }
2267 }
2268
2269 /// Set the change bit for all "listeners".
2270 fn was_changed(&mut self) {
2271 self.change_bits.retain(|change_bit| {
2272 change_bit
2273 .upgrade()
2274 .inspect(|bit| {
2275 _ = bit.replace(true);
2276 })
2277 .is_some()
2278 });
2279 }
2280
2281 /// Starts a transaction, if one is not already in-progress. When undoing or
2282 /// redoing edits, all of the edits performed within a transaction are undone
2283 /// or redone together.
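    ///
    /// A minimal sketch of grouping two edits into one undo step (illustrative
    /// only; assumes an empty `buffer: &mut Buffer` and `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```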
2284 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2285 self.start_transaction_at(Instant::now())
2286 }
2287
2288 /// Starts a transaction, providing the current time. Subsequent transactions
2289 /// that occur within a short period of time will be grouped together. This
2290 /// is controlled by the buffer's undo grouping duration.
2291 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2292 self.transaction_depth += 1;
2293 if self.was_dirty_before_starting_transaction.is_none() {
2294 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2295 }
2296 self.text.start_transaction_at(now)
2297 }
2298
2299 /// Terminates the current transaction, if this is the outermost transaction.
2300 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2301 self.end_transaction_at(Instant::now(), cx)
2302 }
2303
2304 /// Terminates the current transaction, providing the current time. Subsequent transactions
2305 /// that occur within a short period of time will be grouped together. This
2306 /// is controlled by the buffer's undo grouping duration.
2307 pub fn end_transaction_at(
2308 &mut self,
2309 now: Instant,
2310 cx: &mut Context<Self>,
2311 ) -> Option<TransactionId> {
2312 assert!(self.transaction_depth > 0);
2313 self.transaction_depth -= 1;
2314 let was_dirty = if self.transaction_depth == 0 {
2315 self.was_dirty_before_starting_transaction.take().unwrap()
2316 } else {
2317 false
2318 };
2319 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2320 self.did_edit(&start_version, was_dirty, cx);
2321 Some(transaction_id)
2322 } else {
2323 None
2324 }
2325 }
2326
2327 /// Manually add a transaction to the buffer's undo history.
2328 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2329 self.text.push_transaction(transaction, now);
2330 }
2331
2332 /// Differs from `push_transaction` in that it does not clear the redo
2333 /// stack. Intended to be used to create a parent transaction to merge
2334 /// potential child transactions into.
2335 ///
2336 /// The caller is responsible for removing it from the undo history using
2337 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2338 /// are merged into this transaction, the caller is responsible for ensuring
2339 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2340 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
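    ///
    /// A minimal sketch of the intended usage (illustrative only; assumes
    /// `buffer: &mut Buffer` and `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // Nothing was merged, so drop the placeholder from the undo history.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```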
2343 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2344 self.text.push_empty_transaction(now)
2345 }
2346
2347 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2349 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2350 self.text.finalize_last_transaction()
2351 }
2352
2353 /// Manually group all changes since a given transaction.
2354 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2355 self.text.group_until_transaction(transaction_id);
2356 }
2357
    /// Manually remove a transaction from the buffer's undo history.
2359 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2360 self.text.forget_transaction(transaction_id)
2361 }
2362
    /// Retrieve a transaction from the buffer's undo history.
2364 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2365 self.text.get_transaction(transaction_id)
2366 }
2367
2368 /// Manually merge two transactions in the buffer's undo history.
2369 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2370 self.text.merge_transactions(transaction, destination);
2371 }
2372
2373 /// Waits for the buffer to receive operations with the given timestamps.
2374 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2375 &mut self,
2376 edit_ids: It,
2377 ) -> impl Future<Output = Result<()>> + use<It> {
2378 self.text.wait_for_edits(edit_ids)
2379 }
2380
2381 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2382 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2383 &mut self,
2384 anchors: It,
2385 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2386 self.text.wait_for_anchors(anchors)
2387 }
2388
2389 /// Waits for the buffer to receive operations up to the given version.
2390 pub fn wait_for_version(
2391 &mut self,
2392 version: clock::Global,
2393 ) -> impl Future<Output = Result<()>> + use<> {
2394 self.text.wait_for_version(version)
2395 }
2396
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2399 pub fn give_up_waiting(&mut self) {
2400 self.text.give_up_waiting();
2401 }
2402
2403 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2404 let mut rx = None;
2405 if !self.autoindent_requests.is_empty() {
2406 let channel = oneshot::channel();
2407 self.wait_for_autoindent_txs.push(channel.0);
2408 rx = Some(channel.1);
2409 }
2410 rx
2411 }
2412
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2414 pub fn set_active_selections(
2415 &mut self,
2416 selections: Arc<[Selection<Anchor>]>,
2417 line_mode: bool,
2418 cursor_shape: CursorShape,
2419 cx: &mut Context<Self>,
2420 ) {
2421 let lamport_timestamp = self.text.lamport_clock.tick();
2422 self.remote_selections.insert(
2423 self.text.replica_id(),
2424 SelectionSet {
2425 selections: selections.clone(),
2426 lamport_timestamp,
2427 line_mode,
2428 cursor_shape,
2429 },
2430 );
2431 self.send_operation(
2432 Operation::UpdateSelections {
2433 selections,
2434 line_mode,
2435 lamport_timestamp,
2436 cursor_shape,
2437 },
2438 true,
2439 cx,
2440 );
2441 self.non_text_state_update_count += 1;
2442 cx.notify();
2443 }
2444
2445 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2446 /// this replica.
2447 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2448 if self
2449 .remote_selections
2450 .get(&self.text.replica_id())
2451 .is_none_or(|set| !set.selections.is_empty())
2452 {
2453 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2454 }
2455 }
2456
2457 pub fn set_agent_selections(
2458 &mut self,
2459 selections: Arc<[Selection<Anchor>]>,
2460 line_mode: bool,
2461 cursor_shape: CursorShape,
2462 cx: &mut Context<Self>,
2463 ) {
2464 let lamport_timestamp = self.text.lamport_clock.tick();
2465 self.remote_selections.insert(
2466 ReplicaId::AGENT,
2467 SelectionSet {
2468 selections,
2469 lamport_timestamp,
2470 line_mode,
2471 cursor_shape,
2472 },
2473 );
2474 self.non_text_state_update_count += 1;
2475 cx.notify();
2476 }
2477
2478 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2479 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2480 }
2481
2482 /// Replaces the buffer's entire text.
2483 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2484 where
2485 T: Into<Arc<str>>,
2486 {
2487 self.autoindent_requests.clear();
2488 self.edit([(0..self.len(), text)], None, cx)
2489 }
2490
2491 /// Appends the given text to the end of the buffer.
2492 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2493 where
2494 T: Into<Arc<str>>,
2495 {
2496 self.edit([(self.len()..self.len(), text)], None, cx)
2497 }
2498
2499 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2500 /// delete, and a string of text to insert at that location.
2501 ///
2502 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2503 /// request for the edited ranges, which will be processed when the buffer finishes
2504 /// parsing.
2505 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
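    ///
    /// A minimal sketch (illustrative only; assumes `buffer: &mut Buffer` with at
    /// least ten bytes of text and `cx: &mut Context<Buffer>` in scope):
    ///
    /// ```ignore
    /// // Both edits are applied atomically within a single transaction.
    /// buffer.edit(
    ///     [(0..5, "hello"), (10..10, "world")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```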
2508 pub fn edit<I, S, T>(
2509 &mut self,
2510 edits_iter: I,
2511 autoindent_mode: Option<AutoindentMode>,
2512 cx: &mut Context<Self>,
2513 ) -> Option<clock::Lamport>
2514 where
2515 I: IntoIterator<Item = (Range<S>, T)>,
2516 S: ToOffset,
2517 T: Into<Arc<str>>,
2518 {
2519 // Skip invalid edits and coalesce contiguous ones.
2520 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2521
2522 for (range, new_text) in edits_iter {
2523 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2524
2525 if range.start > range.end {
2526 mem::swap(&mut range.start, &mut range.end);
2527 }
2528 let new_text = new_text.into();
2529 if !new_text.is_empty() || !range.is_empty() {
2530 if let Some((prev_range, prev_text)) = edits.last_mut()
2531 && prev_range.end >= range.start
2532 {
2533 prev_range.end = cmp::max(prev_range.end, range.end);
2534 *prev_text = format!("{prev_text}{new_text}").into();
2535 } else {
2536 edits.push((range, new_text));
2537 }
2538 }
2539 }
2540 if edits.is_empty() {
2541 return None;
2542 }
2543
2544 self.start_transaction();
2545 self.pending_autoindent.take();
2546 let autoindent_request = autoindent_mode
2547 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2548
2549 let edit_operation = self.text.edit(edits.iter().cloned());
2550 let edit_id = edit_operation.timestamp();
2551
2552 if let Some((before_edit, mode)) = autoindent_request {
2553 let mut delta = 0isize;
2554 let mut previous_setting = None;
2555 let entries: Vec<_> = edits
2556 .into_iter()
2557 .enumerate()
2558 .zip(&edit_operation.as_edit().unwrap().new_text)
2559 .filter(|((_, (range, _)), _)| {
2560 let language = before_edit.language_at(range.start);
2561 let language_id = language.map(|l| l.id());
2562 if let Some((cached_language_id, auto_indent)) = previous_setting
2563 && cached_language_id == language_id
2564 {
2565 auto_indent
2566 } else {
2567 // The auto-indent setting is not present in editorconfigs, hence
2568 // we can avoid passing the file here.
2569 let auto_indent =
2570 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2571 previous_setting = Some((language_id, auto_indent));
2572 auto_indent
2573 }
2574 })
2575 .map(|((ix, (range, _)), new_text)| {
2576 let new_text_length = new_text.len();
2577 let old_start = range.start.to_point(&before_edit);
2578 let new_start = (delta + range.start as isize) as usize;
2579 let range_len = range.end - range.start;
2580 delta += new_text_length as isize - range_len as isize;
2581
2582 // Decide what range of the insertion to auto-indent, and whether
2583 // the first line of the insertion should be considered a newly-inserted line
2584 // or an edit to an existing line.
2585 let mut range_of_insertion_to_indent = 0..new_text_length;
2586 let mut first_line_is_new = true;
2587
2588 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2589 let old_line_end = before_edit.line_len(old_start.row);
2590
2591 if old_start.column > old_line_start {
2592 first_line_is_new = false;
2593 }
2594
2595 if !new_text.contains('\n')
2596 && (old_start.column + (range_len as u32) < old_line_end
2597 || old_line_end == old_line_start)
2598 {
2599 first_line_is_new = false;
2600 }
2601
2602 // When inserting text starting with a newline, avoid auto-indenting the
2603 // previous line.
2604 if new_text.starts_with('\n') {
2605 range_of_insertion_to_indent.start += 1;
2606 first_line_is_new = true;
2607 }
2608
2609 let mut original_indent_column = None;
2610 if let AutoindentMode::Block {
2611 original_indent_columns,
2612 } = &mode
2613 {
2614 original_indent_column = Some(if new_text.starts_with('\n') {
2615 indent_size_for_text(
2616 new_text[range_of_insertion_to_indent.clone()].chars(),
2617 )
2618 .len
2619 } else {
2620 original_indent_columns
2621 .get(ix)
2622 .copied()
2623 .flatten()
2624 .unwrap_or_else(|| {
2625 indent_size_for_text(
2626 new_text[range_of_insertion_to_indent.clone()].chars(),
2627 )
2628 .len
2629 })
2630 });
2631
2632 // Avoid auto-indenting the line after the edit.
2633 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2634 range_of_insertion_to_indent.end -= 1;
2635 }
2636 }
2637
2638 AutoindentRequestEntry {
2639 first_line_is_new,
2640 original_indent_column,
2641 indent_size: before_edit.language_indent_size_at(range.start, cx),
2642 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2643 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2644 }
2645 })
2646 .collect();
2647
2648 if !entries.is_empty() {
2649 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2650 before_edit,
2651 entries,
2652 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2653 ignore_empty_lines: false,
2654 }));
2655 }
2656 }
2657
2658 self.end_transaction(cx);
2659 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2660 Some(edit_id)
2661 }
2662
2663 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2664 self.was_changed();
2665
2666 if self.edits_since::<usize>(old_version).next().is_none() {
2667 return;
2668 }
2669
2670 self.reparse(cx, true);
2671 cx.emit(BufferEvent::Edited);
2672 if was_dirty != self.is_dirty() {
2673 cx.emit(BufferEvent::DirtyChanged);
2674 }
2675 cx.notify();
2676 }
2677
2678 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2679 where
2680 I: IntoIterator<Item = Range<T>>,
2681 T: ToOffset + Copy,
2682 {
2683 let before_edit = self.snapshot();
2684 let entries = ranges
2685 .into_iter()
2686 .map(|range| AutoindentRequestEntry {
2687 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2688 first_line_is_new: true,
2689 indent_size: before_edit.language_indent_size_at(range.start, cx),
2690 original_indent_column: None,
2691 })
2692 .collect();
2693 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2694 before_edit,
2695 entries,
2696 is_block_mode: false,
2697 ignore_empty_lines: true,
2698 }));
2699 self.request_autoindent(cx);
2700 }
2701
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
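    ///
    /// A minimal sketch (illustrative only; assumes `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>` are in scope):
    ///
    /// ```ignore
    /// // Open a blank, auto-indented line at row 2, column 4, padding with
    /// // blank lines above and below when the neighbors are not already blank.
    /// let new_line_start = buffer.insert_empty_line(Point::new(2, 4), true, true, cx);
    /// ```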
2704 pub fn insert_empty_line(
2705 &mut self,
2706 position: impl ToPoint,
2707 space_above: bool,
2708 space_below: bool,
2709 cx: &mut Context<Self>,
2710 ) -> Point {
2711 let mut position = position.to_point(self);
2712
2713 self.start_transaction();
2714
2715 self.edit(
2716 [(position..position, "\n")],
2717 Some(AutoindentMode::EachLine),
2718 cx,
2719 );
2720
2721 if position.column > 0 {
2722 position += Point::new(1, 0);
2723 }
2724
2725 if !self.is_line_blank(position.row) {
2726 self.edit(
2727 [(position..position, "\n")],
2728 Some(AutoindentMode::EachLine),
2729 cx,
2730 );
2731 }
2732
2733 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2734 self.edit(
2735 [(position..position, "\n")],
2736 Some(AutoindentMode::EachLine),
2737 cx,
2738 );
2739 position.row += 1;
2740 }
2741
2742 if space_below
2743 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2744 {
2745 self.edit(
2746 [(position..position, "\n")],
2747 Some(AutoindentMode::EachLine),
2748 cx,
2749 );
2750 }
2751
2752 self.end_transaction(cx);
2753
2754 position
2755 }
2756
2757 /// Applies the given remote operations to the buffer.
2758 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2759 self.pending_autoindent.take();
2760 let was_dirty = self.is_dirty();
2761 let old_version = self.version.clone();
2762 let mut deferred_ops = Vec::new();
2763 let buffer_ops = ops
2764 .into_iter()
2765 .filter_map(|op| match op {
2766 Operation::Buffer(op) => Some(op),
2767 _ => {
2768 if self.can_apply_op(&op) {
2769 self.apply_op(op, cx);
2770 } else {
2771 deferred_ops.push(op);
2772 }
2773 None
2774 }
2775 })
2776 .collect::<Vec<_>>();
2777 for operation in buffer_ops.iter() {
2778 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2779 }
2780 self.text.apply_ops(buffer_ops);
2781 self.deferred_ops.insert(deferred_ops);
2782 self.flush_deferred_ops(cx);
2783 self.did_edit(&old_version, was_dirty, cx);
2784 // Notify independently of whether the buffer was edited as the operations could include a
2785 // selection update.
2786 cx.notify();
2787 }
2788
2789 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2790 let mut deferred_ops = Vec::new();
2791 for op in self.deferred_ops.drain().iter().cloned() {
2792 if self.can_apply_op(&op) {
2793 self.apply_op(op, cx);
2794 } else {
2795 deferred_ops.push(op);
2796 }
2797 }
2798 self.deferred_ops.insert(deferred_ops);
2799 }
2800
2801 pub fn has_deferred_ops(&self) -> bool {
2802 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2803 }
2804
2805 fn can_apply_op(&self, operation: &Operation) -> bool {
2806 match operation {
2807 Operation::Buffer(_) => {
2808 unreachable!("buffer operations should never be applied at this layer")
2809 }
2810 Operation::UpdateDiagnostics {
2811 diagnostics: diagnostic_set,
2812 ..
2813 } => diagnostic_set.iter().all(|diagnostic| {
2814 self.text.can_resolve(&diagnostic.range.start)
2815 && self.text.can_resolve(&diagnostic.range.end)
2816 }),
2817 Operation::UpdateSelections { selections, .. } => selections
2818 .iter()
2819 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2820 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2821 }
2822 }
2823
2824 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2825 match operation {
2826 Operation::Buffer(_) => {
2827 unreachable!("buffer operations should never be applied at this layer")
2828 }
2829 Operation::UpdateDiagnostics {
2830 server_id,
2831 diagnostics: diagnostic_set,
2832 lamport_timestamp,
2833 } => {
2834 let snapshot = self.snapshot();
2835 self.apply_diagnostic_update(
2836 server_id,
2837 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2838 lamport_timestamp,
2839 cx,
2840 );
2841 }
2842 Operation::UpdateSelections {
2843 selections,
2844 lamport_timestamp,
2845 line_mode,
2846 cursor_shape,
2847 } => {
2848 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2849 && set.lamport_timestamp > lamport_timestamp
2850 {
2851 return;
2852 }
2853
2854 self.remote_selections.insert(
2855 lamport_timestamp.replica_id,
2856 SelectionSet {
2857 selections,
2858 lamport_timestamp,
2859 line_mode,
2860 cursor_shape,
2861 },
2862 );
2863 self.text.lamport_clock.observe(lamport_timestamp);
2864 self.non_text_state_update_count += 1;
2865 }
2866 Operation::UpdateCompletionTriggers {
2867 triggers,
2868 lamport_timestamp,
2869 server_id,
2870 } => {
2871 if triggers.is_empty() {
2872 self.completion_triggers_per_language_server
2873 .remove(&server_id);
2874 self.completion_triggers = self
2875 .completion_triggers_per_language_server
2876 .values()
2877 .flat_map(|triggers| triggers.iter().cloned())
2878 .collect();
2879 } else {
2880 self.completion_triggers_per_language_server
2881 .insert(server_id, triggers.iter().cloned().collect());
2882 self.completion_triggers.extend(triggers);
2883 }
2884 self.text.lamport_clock.observe(lamport_timestamp);
2885 }
2886 Operation::UpdateLineEnding {
2887 line_ending,
2888 lamport_timestamp,
2889 } => {
2890 self.text.set_line_ending(line_ending);
2891 self.text.lamport_clock.observe(lamport_timestamp);
2892 }
2893 }
2894 }
2895
2896 fn apply_diagnostic_update(
2897 &mut self,
2898 server_id: LanguageServerId,
2899 diagnostics: DiagnosticSet,
2900 lamport_timestamp: clock::Lamport,
2901 cx: &mut Context<Self>,
2902 ) {
2903 if lamport_timestamp > self.diagnostics_timestamp {
2904 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2905 if diagnostics.is_empty() {
2906 if let Ok(ix) = ix {
2907 self.diagnostics.remove(ix);
2908 }
2909 } else {
2910 match ix {
2911 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2912 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2913 };
2914 }
2915 self.diagnostics_timestamp = lamport_timestamp;
2916 self.non_text_state_update_count += 1;
2917 self.text.lamport_clock.observe(lamport_timestamp);
2918 cx.notify();
2919 cx.emit(BufferEvent::DiagnosticsUpdated);
2920 }
2921 }
2922
2923 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2924 self.was_changed();
2925 cx.emit(BufferEvent::Operation {
2926 operation,
2927 is_local,
2928 });
2929 }
2930
2931 /// Removes the selections for a given peer.
2932 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2933 self.remote_selections.remove(&replica_id);
2934 cx.notify();
2935 }
2936
2937 /// Undoes the most recent transaction.
2938 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2939 let was_dirty = self.is_dirty();
2940 let old_version = self.version.clone();
2941
2942 if let Some((transaction_id, operation)) = self.text.undo() {
2943 self.send_operation(Operation::Buffer(operation), true, cx);
2944 self.did_edit(&old_version, was_dirty, cx);
2945 Some(transaction_id)
2946 } else {
2947 None
2948 }
2949 }
2950
2951 /// Manually undoes a specific transaction in the buffer's undo history.
2952 pub fn undo_transaction(
2953 &mut self,
2954 transaction_id: TransactionId,
2955 cx: &mut Context<Self>,
2956 ) -> bool {
2957 let was_dirty = self.is_dirty();
2958 let old_version = self.version.clone();
2959 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2960 self.send_operation(Operation::Buffer(operation), true, cx);
2961 self.did_edit(&old_version, was_dirty, cx);
2962 true
2963 } else {
2964 false
2965 }
2966 }
2967
2968 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2969 pub fn undo_to_transaction(
2970 &mut self,
2971 transaction_id: TransactionId,
2972 cx: &mut Context<Self>,
2973 ) -> bool {
2974 let was_dirty = self.is_dirty();
2975 let old_version = self.version.clone();
2976
2977 let operations = self.text.undo_to_transaction(transaction_id);
2978 let undone = !operations.is_empty();
2979 for operation in operations {
2980 self.send_operation(Operation::Buffer(operation), true, cx);
2981 }
2982 if undone {
2983 self.did_edit(&old_version, was_dirty, cx)
2984 }
2985 undone
2986 }
2987
2988 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2989 let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let operation = self.text.undo_operations(counts);
2992 self.send_operation(Operation::Buffer(operation), true, cx);
2993 self.did_edit(&old_version, was_dirty, cx);
2994 }
2995
    /// Redoes the most recently undone transaction.
2997 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2998 let was_dirty = self.is_dirty();
2999 let old_version = self.version.clone();
3000
3001 if let Some((transaction_id, operation)) = self.text.redo() {
3002 self.send_operation(Operation::Buffer(operation), true, cx);
3003 self.did_edit(&old_version, was_dirty, cx);
3004 Some(transaction_id)
3005 } else {
3006 None
3007 }
3008 }
3009
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3011 pub fn redo_to_transaction(
3012 &mut self,
3013 transaction_id: TransactionId,
3014 cx: &mut Context<Self>,
3015 ) -> bool {
3016 let was_dirty = self.is_dirty();
3017 let old_version = self.version.clone();
3018
3019 let operations = self.text.redo_to_transaction(transaction_id);
3020 let redone = !operations.is_empty();
3021 for operation in operations {
3022 self.send_operation(Operation::Buffer(operation), true, cx);
3023 }
3024 if redone {
3025 self.did_edit(&old_version, was_dirty, cx)
3026 }
3027 redone
3028 }
3029
3030 /// Override current completion triggers with the user-provided completion triggers.
3031 pub fn set_completion_triggers(
3032 &mut self,
3033 server_id: LanguageServerId,
3034 triggers: BTreeSet<String>,
3035 cx: &mut Context<Self>,
3036 ) {
3037 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3038 if triggers.is_empty() {
3039 self.completion_triggers_per_language_server
3040 .remove(&server_id);
3041 self.completion_triggers = self
3042 .completion_triggers_per_language_server
3043 .values()
3044 .flat_map(|triggers| triggers.iter().cloned())
3045 .collect();
3046 } else {
3047 self.completion_triggers_per_language_server
3048 .insert(server_id, triggers.clone());
3049 self.completion_triggers.extend(triggers.iter().cloned());
3050 }
3051 self.send_operation(
3052 Operation::UpdateCompletionTriggers {
3053 triggers: triggers.into_iter().collect(),
3054 lamport_timestamp: self.completion_triggers_timestamp,
3055 server_id,
3056 },
3057 true,
3058 cx,
3059 );
3060 cx.notify();
3061 }
3062
3063 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3065 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3066 &self.completion_triggers
3067 }
3068
3069 /// Call this directly after performing edits to prevent the preview tab
3070 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3071 /// to return false until there are additional edits.
3072 pub fn refresh_preview(&mut self) {
3073 self.preview_version = self.version.clone();
3074 }
3075
3076 /// Whether we should preserve the preview status of a tab containing this buffer.
3077 pub fn preserve_preview(&self) -> bool {
3078 !self.has_edits_since(&self.preview_version)
3079 }
3080}
3081
3082#[doc(hidden)]
3083#[cfg(any(test, feature = "test-support"))]
3084impl Buffer {
3085 pub fn edit_via_marked_text(
3086 &mut self,
3087 marked_string: &str,
3088 autoindent_mode: Option<AutoindentMode>,
3089 cx: &mut Context<Self>,
3090 ) {
3091 let edits = self.edits_for_marked_text(marked_string);
3092 self.edit(edits, autoindent_mode, cx);
3093 }
3094
3095 pub fn set_group_interval(&mut self, group_interval: Duration) {
3096 self.text.set_group_interval(group_interval);
3097 }
3098
3099 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3100 where
3101 T: rand::Rng,
3102 {
3103 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3104 let mut last_end = None;
3105 for _ in 0..old_range_count {
3106 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3107 break;
3108 }
3109
3110 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3111 let mut range = self.random_byte_range(new_start, rng);
3112 if rng.random_bool(0.2) {
3113 mem::swap(&mut range.start, &mut range.end);
3114 }
3115 last_end = Some(range.end);
3116
3117 let new_text_len = rng.random_range(0..10);
3118 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3119 new_text = new_text.to_uppercase();
3120
3121 edits.push((range, new_text));
3122 }
3123 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3124 self.edit(edits, None, cx);
3125 }
3126
3127 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3128 let was_dirty = self.is_dirty();
3129 let old_version = self.version.clone();
3130
3131 let ops = self.text.randomly_undo_redo(rng);
3132 if !ops.is_empty() {
3133 for op in ops {
3134 self.send_operation(Operation::Buffer(op), true, cx);
3135 self.did_edit(&old_version, was_dirty, cx);
3136 }
3137 }
3138 }
3139}
3140
3141impl EventEmitter<BufferEvent> for Buffer {}
3142
3143impl Deref for Buffer {
3144 type Target = TextBuffer;
3145
3146 fn deref(&self) -> &Self::Target {
3147 &self.text
3148 }
3149}
3150
3151impl BufferSnapshot {
3152 /// Returns [`IndentSize`] for a given line that respects user settings and
3153 /// language preferences.
3154 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3155 indent_size_for_line(self, row)
3156 }
3157
3158 /// Returns [`IndentSize`] for a given position that respects user settings
3159 /// and language preferences.
3160 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3161 let settings = language_settings(
3162 self.language_at(position).map(|l| l.name()),
3163 self.file(),
3164 cx,
3165 );
3166 if settings.hard_tabs {
3167 IndentSize::tab()
3168 } else {
3169 IndentSize::spaces(settings.tab_size.get())
3170 }
3171 }
3172
3173 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3174 /// is passed in as `single_indent_size`.
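    ///
    /// A minimal sketch (illustrative only; assumes `snapshot: &BufferSnapshot`):
    ///
    /// ```ignore
    /// // Suggested indents for rows 1..4, using four spaces per indent level.
    /// let indents = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```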
3175 pub fn suggested_indents(
3176 &self,
3177 rows: impl Iterator<Item = u32>,
3178 single_indent_size: IndentSize,
3179 ) -> BTreeMap<u32, IndentSize> {
3180 let mut result = BTreeMap::new();
3181
3182 for row_range in contiguous_ranges(rows, 10) {
3183 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3184 Some(suggestions) => suggestions,
3185 _ => break,
3186 };
3187
3188 for (row, suggestion) in row_range.zip(suggestions) {
3189 let indent_size = if let Some(suggestion) = suggestion {
3190 result
3191 .get(&suggestion.basis_row)
3192 .copied()
3193 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3194 .with_delta(suggestion.delta, single_indent_size)
3195 } else {
3196 self.indent_size_for_line(row)
3197 };
3198
3199 result.insert(row, indent_size);
3200 }
3201 }
3202
3203 result
3204 }
3205
3206 fn suggest_autoindents(
3207 &self,
3208 row_range: Range<u32>,
3209 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3210 let config = &self.language.as_ref()?.config;
3211 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3212
3213 #[derive(Debug, Clone)]
3214 struct StartPosition {
3215 start: Point,
3216 suffix: SharedString,
3217 }
3218
3219 // Find the suggested indentation ranges based on the syntax tree.
3220 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3221 let end = Point::new(row_range.end, 0);
3222 let range = (start..end).to_offset(&self.text);
3223 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3224 Some(&grammar.indents_config.as_ref()?.query)
3225 });
3226 let indent_configs = matches
3227 .grammars()
3228 .iter()
3229 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3230 .collect::<Vec<_>>();
3231
3232 let mut indent_ranges = Vec::<Range<Point>>::new();
3233 let mut start_positions = Vec::<StartPosition>::new();
3234 let mut outdent_positions = Vec::<Point>::new();
3235 while let Some(mat) = matches.peek() {
3236 let mut start: Option<Point> = None;
3237 let mut end: Option<Point> = None;
3238
3239 let config = indent_configs[mat.grammar_index];
3240 for capture in mat.captures {
3241 if capture.index == config.indent_capture_ix {
3242 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3243 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3244 } else if Some(capture.index) == config.start_capture_ix {
3245 start = Some(Point::from_ts_point(capture.node.end_position()));
3246 } else if Some(capture.index) == config.end_capture_ix {
3247 end = Some(Point::from_ts_point(capture.node.start_position()));
3248 } else if Some(capture.index) == config.outdent_capture_ix {
3249 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3250 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3251 start_positions.push(StartPosition {
3252 start: Point::from_ts_point(capture.node.start_position()),
3253 suffix: suffix.clone(),
3254 });
3255 }
3256 }
3257
3258 matches.advance();
3259 if let Some((start, end)) = start.zip(end) {
3260 if start.row == end.row {
3261 continue;
3262 }
3263 let range = start..end;
3264 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3265 Err(ix) => indent_ranges.insert(ix, range),
3266 Ok(ix) => {
3267 let prev_range = &mut indent_ranges[ix];
3268 prev_range.end = prev_range.end.max(range.end);
3269 }
3270 }
3271 }
3272 }
3273
3274 let mut error_ranges = Vec::<Range<Point>>::new();
3275 let mut matches = self
3276 .syntax
3277 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3278 while let Some(mat) = matches.peek() {
3279 let node = mat.captures[0].node;
3280 let start = Point::from_ts_point(node.start_position());
3281 let end = Point::from_ts_point(node.end_position());
3282 let range = start..end;
3283 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3284 Ok(ix) | Err(ix) => ix,
3285 };
3286 let mut end_ix = ix;
3287 while let Some(existing_range) = error_ranges.get(end_ix) {
3288 if existing_range.end < end {
3289 end_ix += 1;
3290 } else {
3291 break;
3292 }
3293 }
3294 error_ranges.splice(ix..end_ix, [range]);
3295 matches.advance();
3296 }
3297
3298 outdent_positions.sort();
3299 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3302 if let Some(range_to_truncate) = indent_ranges
3303 .iter_mut()
3304 .filter(|indent_range| indent_range.contains(&outdent_position))
3305 .next_back()
3306 {
3307 range_to_truncate.end = outdent_position;
3308 }
3309 }
3310
3311 start_positions.sort_by_key(|b| b.start);
3312
        // Find the suggested indentation increases and decreases based on regexes.
3314 let mut regex_outdent_map = HashMap::default();
3315 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3316 let mut start_positions_iter = start_positions.iter().peekable();
3317
3318 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3319 self.for_each_line(
3320 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3321 ..Point::new(row_range.end, 0),
3322 |row, line| {
3323 if config
3324 .decrease_indent_pattern
3325 .as_ref()
3326 .is_some_and(|regex| regex.is_match(line))
3327 {
3328 indent_change_rows.push((row, Ordering::Less));
3329 }
3330 if config
3331 .increase_indent_pattern
3332 .as_ref()
3333 .is_some_and(|regex| regex.is_match(line))
3334 {
3335 indent_change_rows.push((row + 1, Ordering::Greater));
3336 }
3337 while let Some(pos) = start_positions_iter.peek() {
3338 if pos.start.row < row {
3339 let pos = start_positions_iter.next().unwrap();
3340 last_seen_suffix
3341 .entry(pos.suffix.to_string())
3342 .or_default()
3343 .push(pos.start);
3344 } else {
3345 break;
3346 }
3347 }
3348 for rule in &config.decrease_indent_patterns {
3349 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3350 let row_start_column = self.indent_size_for_line(row).len;
3351 let basis_row = rule
3352 .valid_after
3353 .iter()
3354 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3355 .flatten()
3356 .filter(|start_point| start_point.column <= row_start_column)
3357 .max_by_key(|start_point| start_point.row);
3358 if let Some(outdent_to_row) = basis_row {
3359 regex_outdent_map.insert(row, outdent_to_row.row);
3360 }
3361 break;
3362 }
3363 }
3364 },
3365 );
3366
3367 let mut indent_changes = indent_change_rows.into_iter().peekable();
3368 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3369 prev_non_blank_row.unwrap_or(0)
3370 } else {
3371 row_range.start.saturating_sub(1)
3372 };
3373
3374 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3375 Some(row_range.map(move |row| {
3376 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3377
3378 let mut indent_from_prev_row = false;
3379 let mut outdent_from_prev_row = false;
3380 let mut outdent_to_row = u32::MAX;
3381 let mut from_regex = false;
3382
3383 while let Some((indent_row, delta)) = indent_changes.peek() {
3384 match indent_row.cmp(&row) {
3385 Ordering::Equal => match delta {
3386 Ordering::Less => {
3387 from_regex = true;
3388 outdent_from_prev_row = true
3389 }
3390 Ordering::Greater => {
3391 indent_from_prev_row = true;
3392 from_regex = true
3393 }
3394 _ => {}
3395 },
3396
3397 Ordering::Greater => break,
3398 Ordering::Less => {}
3399 }
3400
3401 indent_changes.next();
3402 }
3403
3404 for range in &indent_ranges {
3405 if range.start.row >= row {
3406 break;
3407 }
3408 if range.start.row == prev_row && range.end > row_start {
3409 indent_from_prev_row = true;
3410 }
3411 if range.end > prev_row_start && range.end <= row_start {
3412 outdent_to_row = outdent_to_row.min(range.start.row);
3413 }
3414 }
3415
3416 if let Some(basis_row) = regex_outdent_map.get(&row) {
3417 indent_from_prev_row = false;
3418 outdent_to_row = *basis_row;
3419 from_regex = true;
3420 }
3421
3422 let within_error = error_ranges
3423 .iter()
3424 .any(|e| e.start.row < row && e.end > row_start);
3425
3426 let suggestion = if outdent_to_row == prev_row
3427 || (outdent_from_prev_row && indent_from_prev_row)
3428 {
3429 Some(IndentSuggestion {
3430 basis_row: prev_row,
3431 delta: Ordering::Equal,
3432 within_error: within_error && !from_regex,
3433 })
3434 } else if indent_from_prev_row {
3435 Some(IndentSuggestion {
3436 basis_row: prev_row,
3437 delta: Ordering::Greater,
3438 within_error: within_error && !from_regex,
3439 })
3440 } else if outdent_to_row < prev_row {
3441 Some(IndentSuggestion {
3442 basis_row: outdent_to_row,
3443 delta: Ordering::Equal,
3444 within_error: within_error && !from_regex,
3445 })
3446 } else if outdent_from_prev_row {
3447 Some(IndentSuggestion {
3448 basis_row: prev_row,
3449 delta: Ordering::Less,
3450 within_error: within_error && !from_regex,
3451 })
3452 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3453 {
3454 Some(IndentSuggestion {
3455 basis_row: prev_row,
3456 delta: Ordering::Equal,
3457 within_error: within_error && !from_regex,
3458 })
3459 } else {
3460 None
3461 };
3462
3463 prev_row = row;
3464 prev_row_start = row_start;
3465 suggestion
3466 }))
3467 }
3468
3469 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3470 while row > 0 {
3471 row -= 1;
3472 if !self.is_line_blank(row) {
3473 return Some(row);
3474 }
3475 }
3476 None
3477 }
3478
3479 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3480 let captures = self.syntax.captures(range, &self.text, |grammar| {
3481 grammar
3482 .highlights_config
3483 .as_ref()
3484 .map(|config| &config.query)
3485 });
3486 let highlight_maps = captures
3487 .grammars()
3488 .iter()
3489 .map(|grammar| grammar.highlight_map())
3490 .collect();
3491 (captures, highlight_maps)
3492 }
3493
    /// Iterates over chunks of text in the given range of the buffer.
    ///
    /// Chunk boundaries are arbitrary, since the text is stored in a [`Rope`](text::Rope).
    /// Additionally, chunks are split so that each chunk has a single syntax highlighting
    /// style and diagnostic status.
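    ///
    /// A minimal sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// // Collect the first 100 bytes of the buffer, with syntax-aware chunking enabled.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len().min(100), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```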
3498 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3499 let range = range.start.to_offset(self)..range.end.to_offset(self);
3500
3501 let mut syntax = None;
3502 if language_aware {
3503 syntax = Some(self.get_highlights(range.clone()));
3504 }
3505 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3506 let diagnostics = language_aware;
3507 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3508 }
3509
3510 pub fn highlighted_text_for_range<T: ToOffset>(
3511 &self,
3512 range: Range<T>,
3513 override_style: Option<HighlightStyle>,
3514 syntax_theme: &SyntaxTheme,
3515 ) -> HighlightedText {
3516 HighlightedText::from_buffer_range(
3517 range,
3518 &self.text,
3519 &self.syntax,
3520 override_style,
3521 syntax_theme,
3522 )
3523 }
3524
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used to avoid allocating a new `String` for every line.
3527 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3528 let mut line = String::new();
3529 let mut row = range.start.row;
3530 for chunk in self
3531 .as_rope()
3532 .chunks_in_range(range.to_offset(self))
3533 .chain(["\n"])
3534 {
3535 for (newline_ix, text) in chunk.split('\n').enumerate() {
3536 if newline_ix > 0 {
3537 callback(row, &line);
3538 row += 1;
3539 line.clear();
3540 }
3541 line.push_str(text);
3542 }
3543 }
3544 }
3545
3546 /// Iterates over every [`SyntaxLayer`] in the buffer.
3547 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3548 self.syntax_layers_for_range(0..self.len(), true)
3549 }
3550
3551 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3552 let offset = position.to_offset(self);
3553 self.syntax_layers_for_range(offset..offset, false)
3554 .filter(|l| {
3555 if let Some(ranges) = l.included_sub_ranges {
3556 ranges.iter().any(|range| {
3557 let start = range.start.to_offset(self);
3558 start <= offset && {
3559 let end = range.end.to_offset(self);
3560 offset < end
3561 }
3562 })
3563 } else {
3564 l.node().start_byte() <= offset && l.node().end_byte() > offset
3565 }
3566 })
3567 .last()
3568 }
3569
3570 pub fn syntax_layers_for_range<D: ToOffset>(
3571 &self,
3572 range: Range<D>,
3573 include_hidden: bool,
3574 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3575 self.syntax
3576 .layers_for_range(range, &self.text, include_hidden)
3577 }
3578
3579 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3580 &self,
3581 range: Range<D>,
3582 ) -> Option<SyntaxLayer<'_>> {
3583 let range = range.to_offset(self);
3584 self.syntax
3585 .layers_for_range(range, &self.text, false)
3586 .max_by(|a, b| {
3587 if a.depth != b.depth {
3588 a.depth.cmp(&b.depth)
3589 } else if a.offset.0 != b.offset.0 {
3590 a.offset.0.cmp(&b.offset.0)
3591 } else {
3592 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3593 }
3594 })
3595 }
3596
3597 /// Returns the main [`Language`].
3598 pub fn language(&self) -> Option<&Arc<Language>> {
3599 self.language.as_ref()
3600 }
3601
3602 /// Returns the [`Language`] at the given location.
3603 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3604 self.syntax_layer_at(position)
3605 .map(|info| info.language)
3606 .or(self.language.as_ref())
3607 }
3608
3609 /// Returns the settings for the language at the given location.
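    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` and a `cx: &App`
    /// are in scope; the `tab_size` field access is purely illustrative:
    ///
    /// ```ignore
    /// let settings = snapshot.settings_at(0, cx);
    /// let tab_size = settings.tab_size;
    /// ```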
3610 pub fn settings_at<'a, D: ToOffset>(
3611 &'a self,
3612 position: D,
3613 cx: &'a App,
3614 ) -> Cow<'a, LanguageSettings> {
3615 language_settings(
3616 self.language_at(position).map(|l| l.name()),
3617 self.file.as_ref(),
3618 cx,
3619 )
3620 }
3621
3622 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3623 CharClassifier::new(self.language_scope_at(point))
3624 }
3625
3626 /// Returns the [`LanguageScope`] at the given location.
3627 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3628 let offset = position.to_offset(self);
3629 let mut scope = None;
3630 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3631
3632 // Use the layer that has the smallest node intersecting the given point.
3633 for layer in self
3634 .syntax
3635 .layers_for_range(offset..offset, &self.text, false)
3636 {
3637 let mut cursor = layer.node().walk();
3638
3639 let mut range = None;
3640 loop {
3641 let child_range = cursor.node().byte_range();
3642 if !child_range.contains(&offset) {
3643 break;
3644 }
3645
3646 range = Some(child_range);
3647 if cursor.goto_first_child_for_byte(offset).is_none() {
3648 break;
3649 }
3650 }
3651
3652 if let Some(range) = range
3653 && smallest_range_and_depth.as_ref().is_none_or(
3654 |(smallest_range, smallest_range_depth)| {
3655 if layer.depth > *smallest_range_depth {
3656 true
3657 } else if layer.depth == *smallest_range_depth {
3658 range.len() < smallest_range.len()
3659 } else {
3660 false
3661 }
3662 },
3663 )
3664 {
3665 smallest_range_and_depth = Some((range, layer.depth));
3666 scope = Some(LanguageScope {
3667 language: layer.language.clone(),
3668 override_id: layer.override_id(offset, &self.text),
3669 });
3670 }
3671 }
3672
3673 scope.or_else(|| {
3674 self.language.clone().map(|language| LanguageScope {
3675 language,
3676 override_id: None,
3677 })
3678 })
3679 }
3680
3681 /// Returns a tuple of the range and character kind of the word
3682 /// surrounding the given position.
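    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` and a byte `offset`:
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```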
3683 pub fn surrounding_word<T: ToOffset>(
3684 &self,
3685 start: T,
3686 scope_context: Option<CharScopeContext>,
3687 ) -> (Range<usize>, Option<CharKind>) {
3688 let mut start = start.to_offset(self);
3689 let mut end = start;
3690 let mut next_chars = self.chars_at(start).take(128).peekable();
3691 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3692
3693 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3694 let word_kind = cmp::max(
3695 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3696 next_chars.peek().copied().map(|c| classifier.kind(c)),
3697 );
3698
3699 for ch in prev_chars {
3700 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3701 start -= ch.len_utf8();
3702 } else {
3703 break;
3704 }
3705 }
3706
3707 for ch in next_chars {
3708 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3709 end += ch.len_utf8();
3710 } else {
3711 break;
3712 }
3713 }
3714
3715 (start..end, word_kind)
3716 }
3717
3718 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3719 /// range. When `require_larger` is true, the node found must be larger than the query range.
3720 ///
3721 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3722 /// be moved to the root of the tree.
3723 fn goto_node_enclosing_range(
3724 cursor: &mut tree_sitter::TreeCursor,
3725 query_range: &Range<usize>,
3726 require_larger: bool,
3727 ) -> bool {
3728 let mut ascending = false;
3729 loop {
3730 let mut range = cursor.node().byte_range();
3731 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3734 if range.start > query_range.start {
3735 cursor.goto_previous_sibling();
3736 range = cursor.node().byte_range();
3737 }
3738 } else {
3739 // When the query range is non-empty and the current node ends exactly at the start,
3740 // move to the next sibling to find a node that extends beyond the start.
3741 if range.end == query_range.start {
3742 cursor.goto_next_sibling();
3743 range = cursor.node().byte_range();
3744 }
3745 }
3746
3747 let encloses = range.contains_inclusive(query_range)
3748 && (!require_larger || range.len() > query_range.len());
3749 if !encloses {
3750 ascending = true;
3751 if !cursor.goto_parent() {
3752 return false;
3753 }
3754 continue;
3755 } else if ascending {
3756 return true;
3757 }
3758
3759 // Descend into the current node.
3760 if cursor
3761 .goto_first_child_for_byte(query_range.start)
3762 .is_none()
3763 {
3764 return true;
3765 }
3766 }
3767 }
3768
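    /// Returns the smallest syntax node that spans the given range and is strictly larger
    /// than it, considering every syntax layer that overlaps the range.
    ///
    /// A sketch (not a doctest) of expanding a selection to its enclosing node, assuming a
    /// `snapshot: &BufferSnapshot` and a `selection_range: Range<usize>`:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
    ///     let expanded = node.byte_range();
    /// }
    /// ```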
3769 pub fn syntax_ancestor<'a, T: ToOffset>(
3770 &'a self,
3771 range: Range<T>,
3772 ) -> Option<tree_sitter::Node<'a>> {
3773 let range = range.start.to_offset(self)..range.end.to_offset(self);
3774 let mut result: Option<tree_sitter::Node<'a>> = None;
3775 for layer in self
3776 .syntax
3777 .layers_for_range(range.clone(), &self.text, true)
3778 {
3779 let mut cursor = layer.node().walk();
3780
3781 // Find the node that both contains the range and is larger than it.
3782 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3783 continue;
3784 }
3785
3786 let left_node = cursor.node();
3787 let mut layer_result = left_node;
3788
3789 // For an empty range, try to find another node immediately to the right of the range.
3790 if left_node.end_byte() == range.start {
3791 let mut right_node = None;
3792 while !cursor.goto_next_sibling() {
3793 if !cursor.goto_parent() {
3794 break;
3795 }
3796 }
3797
3798 while cursor.node().start_byte() == range.start {
3799 right_node = Some(cursor.node());
3800 if !cursor.goto_first_child() {
3801 break;
3802 }
3803 }
3804
3805 // If there is a candidate node on both sides of the (empty) range, then
3806 // decide between the two by favoring a named node over an anonymous token.
3807 // If both nodes are the same in that regard, favor the right one.
3808 if let Some(right_node) = right_node
3809 && (right_node.is_named() || !left_node.is_named())
3810 {
3811 layer_result = right_node;
3812 }
3813 }
3814
3815 if let Some(previous_result) = &result
3816 && previous_result.byte_range().len() < layer_result.byte_range().len()
3817 {
3818 continue;
3819 }
3820 result = Some(layer_result);
3821 }
3822
3823 result
3824 }
3825
3826 /// Find the previous sibling syntax node at the given range.
3827 ///
3828 /// This function locates the syntax node that precedes the node containing
3829 /// the given range. It searches hierarchically by:
3830 /// 1. Finding the node that contains the given range
3831 /// 2. Looking for the previous sibling at the same tree level
3832 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3833 ///
3834 /// Returns `None` if there is no previous sibling at any ancestor level.
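    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` and a byte `offset`:
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(offset..offset) {
    ///     println!("previous sibling kind: {}", prev.kind());
    /// }
    /// ```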
3835 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3836 &'a self,
3837 range: Range<T>,
3838 ) -> Option<tree_sitter::Node<'a>> {
3839 let range = range.start.to_offset(self)..range.end.to_offset(self);
3840 let mut result: Option<tree_sitter::Node<'a>> = None;
3841
3842 for layer in self
3843 .syntax
3844 .layers_for_range(range.clone(), &self.text, true)
3845 {
3846 let mut cursor = layer.node().walk();
3847
3848 // Find the node that contains the range
3849 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3850 continue;
3851 }
3852
3853 // Look for the previous sibling, moving up ancestor levels if needed
3854 loop {
3855 if cursor.goto_previous_sibling() {
3856 let layer_result = cursor.node();
3857
3858 if let Some(previous_result) = &result {
3859 if previous_result.byte_range().end < layer_result.byte_range().end {
3860 continue;
3861 }
3862 }
3863 result = Some(layer_result);
3864 break;
3865 }
3866
3867 // No sibling found at this level, try moving up to parent
3868 if !cursor.goto_parent() {
3869 break;
3870 }
3871 }
3872 }
3873
3874 result
3875 }
3876
3877 /// Find the next sibling syntax node at the given range.
3878 ///
3879 /// This function locates the syntax node that follows the node containing
3880 /// the given range. It searches hierarchically by:
3881 /// 1. Finding the node that contains the given range
3882 /// 2. Looking for the next sibling at the same tree level
3883 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3884 ///
3885 /// Returns `None` if there is no next sibling at any ancestor level.
3886 pub fn syntax_next_sibling<'a, T: ToOffset>(
3887 &'a self,
3888 range: Range<T>,
3889 ) -> Option<tree_sitter::Node<'a>> {
3890 let range = range.start.to_offset(self)..range.end.to_offset(self);
3891 let mut result: Option<tree_sitter::Node<'a>> = None;
3892
3893 for layer in self
3894 .syntax
3895 .layers_for_range(range.clone(), &self.text, true)
3896 {
3897 let mut cursor = layer.node().walk();
3898
3899 // Find the node that contains the range
3900 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3901 continue;
3902 }
3903
3904 // Look for the next sibling, moving up ancestor levels if needed
3905 loop {
3906 if cursor.goto_next_sibling() {
3907 let layer_result = cursor.node();
3908
3909 if let Some(previous_result) = &result {
3910 if previous_result.byte_range().start > layer_result.byte_range().start {
3911 continue;
3912 }
3913 }
3914 result = Some(layer_result);
3915 break;
3916 }
3917
3918 // No sibling found at this level, try moving up to parent
3919 if !cursor.goto_parent() {
3920 break;
3921 }
3922 }
3923 }
3924
3925 result
3926 }
3927
3928 /// Returns the root syntax node within the given row
3929 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3930 let start_offset = position.to_offset(self);
3931
3932 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3933
3934 let layer = self
3935 .syntax
3936 .layers_for_range(start_offset..start_offset, &self.text, true)
3937 .next()?;
3938
3939 let mut cursor = layer.node().walk();
3940
3941 // Descend to the first leaf that touches the start of the range.
3942 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3943 if cursor.node().end_byte() == start_offset {
3944 cursor.goto_next_sibling();
3945 }
3946 }
3947
3948 // Ascend to the root node within the same row.
3949 while cursor.goto_parent() {
3950 if cursor.node().start_position().row != row {
3951 break;
3952 }
3953 }
3954
3955 Some(cursor.node())
3956 }
3957
3958 /// Returns the outline for the buffer.
3959 ///
3960 /// This method allows passing an optional [`SyntaxTheme`] to
3961 /// syntax-highlight the returned symbols.
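    ///
    /// A sketch (not a doctest) of rendering a plain-text outline, assuming a
    /// `snapshot: &BufferSnapshot`; the same items can also be obtained directly via
    /// [`Self::outline_items_containing`]:
    ///
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```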
3962 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3963 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3964 }
3965
3966 /// Returns all the symbols that contain the given position.
3967 ///
3968 /// This method allows passing an optional [`SyntaxTheme`] to
3969 /// syntax-highlight the returned symbols.
3970 pub fn symbols_containing<T: ToOffset>(
3971 &self,
3972 position: T,
3973 theme: Option<&SyntaxTheme>,
3974 ) -> Vec<OutlineItem<Anchor>> {
3975 let position = position.to_offset(self);
3976 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3977 let end = self.clip_offset(position + 1, Bias::Right);
3978 let mut items = self.outline_items_containing(start..end, false, theme);
3979 let mut prev_depth = None;
3980 items.retain(|item| {
3981 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3982 prev_depth = Some(item.depth);
3983 result
3984 });
3985 items
3986 }
3987
3988 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3989 let range = range.to_offset(self);
3990 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3991 grammar.outline_config.as_ref().map(|c| &c.query)
3992 });
3993 let configs = matches
3994 .grammars()
3995 .iter()
3996 .map(|g| g.outline_config.as_ref().unwrap())
3997 .collect::<Vec<_>>();
3998
3999 while let Some(mat) = matches.peek() {
4000 let config = &configs[mat.grammar_index];
4001 let containing_item_node = maybe!({
4002 let item_node = mat.captures.iter().find_map(|cap| {
4003 if cap.index == config.item_capture_ix {
4004 Some(cap.node)
4005 } else {
4006 None
4007 }
4008 })?;
4009
4010 let item_byte_range = item_node.byte_range();
4011 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4012 None
4013 } else {
4014 Some(item_node)
4015 }
4016 });
4017
4018 if let Some(item_node) = containing_item_node {
4019 return Some(
4020 Point::from_ts_point(item_node.start_position())
4021 ..Point::from_ts_point(item_node.end_position()),
4022 );
4023 }
4024
4025 matches.advance();
4026 }
4027 None
4028 }
4029
4030 pub fn outline_items_containing<T: ToOffset>(
4031 &self,
4032 range: Range<T>,
4033 include_extra_context: bool,
4034 theme: Option<&SyntaxTheme>,
4035 ) -> Vec<OutlineItem<Anchor>> {
4036 self.outline_items_containing_internal(
4037 range,
4038 include_extra_context,
4039 theme,
4040 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4041 )
4042 }
4043
4044 pub fn outline_items_as_points_containing<T: ToOffset>(
4045 &self,
4046 range: Range<T>,
4047 include_extra_context: bool,
4048 theme: Option<&SyntaxTheme>,
4049 ) -> Vec<OutlineItem<Point>> {
4050 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4051 range
4052 })
4053 }
4054
4055 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4056 &self,
4057 range: Range<T>,
4058 include_extra_context: bool,
4059 theme: Option<&SyntaxTheme>,
4060 ) -> Vec<OutlineItem<usize>> {
4061 self.outline_items_containing_internal(
4062 range,
4063 include_extra_context,
4064 theme,
4065 |buffer, range| range.to_offset(buffer),
4066 )
4067 }
4068
4069 fn outline_items_containing_internal<T: ToOffset, U>(
4070 &self,
4071 range: Range<T>,
4072 include_extra_context: bool,
4073 theme: Option<&SyntaxTheme>,
4074 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4075 ) -> Vec<OutlineItem<U>> {
4076 let range = range.to_offset(self);
4077 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4078 grammar.outline_config.as_ref().map(|c| &c.query)
4079 });
4080
4081 let mut items = Vec::new();
4082 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4083 while let Some(mat) = matches.peek() {
4084 let config = matches.grammars()[mat.grammar_index]
4085 .outline_config
4086 .as_ref()
4087 .unwrap();
4088 if let Some(item) =
4089 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4090 {
4091 items.push(item);
4092 } else if let Some(capture) = mat
4093 .captures
4094 .iter()
4095 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4096 {
4097 let capture_range = capture.node.start_position()..capture.node.end_position();
4098 let mut capture_row_range =
4099 capture_range.start.row as u32..capture_range.end.row as u32;
4100 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4101 {
4102 capture_row_range.end -= 1;
4103 }
4104 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4105 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4106 last_row_range.end = capture_row_range.end;
4107 } else {
4108 annotation_row_ranges.push(capture_row_range);
4109 }
4110 } else {
4111 annotation_row_ranges.push(capture_row_range);
4112 }
4113 }
4114 matches.advance();
4115 }
4116
4117 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4118
4119 // Assign depths based on containment relationships and convert to anchors.
4120 let mut item_ends_stack = Vec::<Point>::new();
4121 let mut anchor_items = Vec::new();
4122 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4123 for item in items {
4124 while let Some(last_end) = item_ends_stack.last().copied() {
4125 if last_end < item.range.end {
4126 item_ends_stack.pop();
4127 } else {
4128 break;
4129 }
4130 }
4131
4132 let mut annotation_row_range = None;
4133 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4134 let row_preceding_item = item.range.start.row.saturating_sub(1);
4135 if next_annotation_row_range.end < row_preceding_item {
4136 annotation_row_ranges.next();
4137 } else {
4138 if next_annotation_row_range.end == row_preceding_item {
4139 annotation_row_range = Some(next_annotation_row_range.clone());
4140 annotation_row_ranges.next();
4141 }
4142 break;
4143 }
4144 }
4145
4146 anchor_items.push(OutlineItem {
4147 depth: item_ends_stack.len(),
4148 range: range_callback(self, item.range.clone()),
4149 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4150 text: item.text,
4151 highlight_ranges: item.highlight_ranges,
4152 name_ranges: item.name_ranges,
4153 body_range: item.body_range.map(|r| range_callback(self, r)),
4154 annotation_range: annotation_row_range.map(|annotation_range| {
4155 let point_range = Point::new(annotation_range.start, 0)
4156 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4157 range_callback(self, point_range)
4158 }),
4159 });
4160 item_ends_stack.push(item.range.end);
4161 }
4162
4163 anchor_items
4164 }
4165
4166 fn next_outline_item(
4167 &self,
4168 config: &OutlineConfig,
4169 mat: &SyntaxMapMatch,
4170 range: &Range<usize>,
4171 include_extra_context: bool,
4172 theme: Option<&SyntaxTheme>,
4173 ) -> Option<OutlineItem<Point>> {
4174 let item_node = mat.captures.iter().find_map(|cap| {
4175 if cap.index == config.item_capture_ix {
4176 Some(cap.node)
4177 } else {
4178 None
4179 }
4180 })?;
4181
4182 let item_byte_range = item_node.byte_range();
4183 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4184 return None;
4185 }
4186 let item_point_range = Point::from_ts_point(item_node.start_position())
4187 ..Point::from_ts_point(item_node.end_position());
4188
4189 let mut open_point = None;
4190 let mut close_point = None;
4191
4192 let mut buffer_ranges = Vec::new();
4193 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4194 let mut range = node.start_byte()..node.end_byte();
4195 let start = node.start_position();
4196 if node.end_position().row > start.row {
4197 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4198 }
4199
4200 if !range.is_empty() {
4201 buffer_ranges.push((range, node_is_name));
4202 }
4203 };
4204
4205 for capture in mat.captures {
4206 if capture.index == config.name_capture_ix {
4207 add_to_buffer_ranges(capture.node, true);
4208 } else if Some(capture.index) == config.context_capture_ix
4209 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4210 {
4211 add_to_buffer_ranges(capture.node, false);
            } else if Some(capture.index) == config.open_capture_ix {
                open_point = Some(Point::from_ts_point(capture.node.end_position()));
            } else if Some(capture.index) == config.close_capture_ix {
                close_point = Some(Point::from_ts_point(capture.node.start_position()));
            }
4219 }
4220
4221 if buffer_ranges.is_empty() {
4222 return None;
4223 }
4224 let source_range_for_text =
4225 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4226
4227 let mut text = String::new();
4228 let mut highlight_ranges = Vec::new();
4229 let mut name_ranges = Vec::new();
4230 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4231 let mut last_buffer_range_end = 0;
4232 for (buffer_range, is_name) in buffer_ranges {
4233 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4234 if space_added {
4235 text.push(' ');
4236 }
4237 let before_append_len = text.len();
4238 let mut offset = buffer_range.start;
4239 chunks.seek(buffer_range.clone());
4240 for mut chunk in chunks.by_ref() {
4241 if chunk.text.len() > buffer_range.end - offset {
4242 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4243 offset = buffer_range.end;
4244 } else {
4245 offset += chunk.text.len();
4246 }
4247 let style = chunk
4248 .syntax_highlight_id
4249 .zip(theme)
4250 .and_then(|(highlight, theme)| highlight.style(theme));
4251 if let Some(style) = style {
4252 let start = text.len();
4253 let end = start + chunk.text.len();
4254 highlight_ranges.push((start..end, style));
4255 }
4256 text.push_str(chunk.text);
4257 if offset >= buffer_range.end {
4258 break;
4259 }
4260 }
4261 if is_name {
4262 let after_append_len = text.len();
4263 let start = if space_added && !name_ranges.is_empty() {
4264 before_append_len - 1
4265 } else {
4266 before_append_len
4267 };
4268 name_ranges.push(start..after_append_len);
4269 }
4270 last_buffer_range_end = buffer_range.end;
4271 }
4272
4273 Some(OutlineItem {
4274 depth: 0, // We'll calculate the depth later
4275 range: item_point_range,
4276 source_range_for_text: source_range_for_text.to_point(self),
4277 text,
4278 highlight_ranges,
4279 name_ranges,
4280 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4281 annotation_range: None,
4282 })
4283 }
4284
4285 pub fn function_body_fold_ranges<T: ToOffset>(
4286 &self,
4287 within: Range<T>,
4288 ) -> impl Iterator<Item = Range<usize>> + '_ {
4289 self.text_object_ranges(within, TreeSitterOptions::default())
4290 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4291 }
4292
4293 /// For each grammar in the language, runs the provided
4294 /// [`tree_sitter::Query`] against the given range.
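    ///
    /// A sketch (not a doctest) that runs each grammar's outline query over the whole
    /// buffer, assuming a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```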
4295 pub fn matches(
4296 &self,
4297 range: Range<usize>,
4298 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4299 ) -> SyntaxMapMatches<'_> {
4300 self.syntax.matches(range, self, query)
4301 }
4302
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, it may return more bracket pairs than the range itself contains.
    ///
    /// Chunks present in `known_chunks` are omitted.
    /// The resulting bracket match collections are not ordered.
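    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, brackets) in &by_chunk {
    ///     println!("rows {:?}: {} bracket pairs", row_range, brackets.len());
    /// }
    /// ```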
4308 pub fn fetch_bracket_ranges(
4309 &self,
4310 range: Range<usize>,
4311 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4312 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4313 let mut all_bracket_matches = HashMap::default();
4314
4315 for chunk in self
4316 .tree_sitter_data
4317 .chunks
4318 .applicable_chunks(&[range.to_point(self)])
4319 {
4320 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4321 continue;
4322 }
4323 let chunk_range = chunk.anchor_range();
4324 let chunk_range = chunk_range.to_offset(&self);
4325
4326 if let Some(cached_brackets) =
4327 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4328 {
4329 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4330 continue;
4331 }
4332
4333 let mut all_brackets = Vec::new();
4334 let mut opens = Vec::new();
4335 let mut color_pairs = Vec::new();
4336
4337 let mut matches = self
4338 .syntax
4339 .matches(chunk_range.clone(), &self.text, |grammar| {
4340 grammar.brackets_config.as_ref().map(|c| &c.query)
4341 });
4342 let configs = matches
4343 .grammars()
4344 .iter()
4345 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4346 .collect::<Vec<_>>();
4347
4348 while let Some(mat) = matches.peek() {
4349 let mut open = None;
4350 let mut close = None;
4351 let syntax_layer_depth = mat.depth;
4352 let config = configs[mat.grammar_index];
4353 let pattern = &config.patterns[mat.pattern_index];
4354 for capture in mat.captures {
4355 if capture.index == config.open_capture_ix {
4356 open = Some(capture.node.byte_range());
4357 } else if capture.index == config.close_capture_ix {
4358 close = Some(capture.node.byte_range());
4359 }
4360 }
4361
4362 matches.advance();
4363
4364 let Some((open_range, close_range)) = open.zip(close) else {
4365 continue;
4366 };
4367
4368 let bracket_range = open_range.start..=close_range.end;
4369 if !bracket_range.overlaps(&chunk_range) {
4370 continue;
4371 }
4372
4373 let index = all_brackets.len();
4374 all_brackets.push(BracketMatch {
4375 open_range: open_range.clone(),
4376 close_range: close_range.clone(),
4377 newline_only: pattern.newline_only,
4378 syntax_layer_depth,
4379 color_index: None,
4380 });
4381
                // Certain languages have "brackets" that are not literal brackets, e.g. tags, and
                // such a bracket pair will match the entire tag with all of the text inside.
                // For now, avoid highlighting any pair where both brackets are longer than a single character.
                // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4386 let should_color =
4387 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4388 if should_color {
4389 opens.push(open_range.clone());
4390 color_pairs.push((open_range, close_range, index));
4391 }
4392 }
4393
4394 opens.sort_by_key(|r| (r.start, r.end));
4395 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4396 color_pairs.sort_by_key(|(_, close, _)| close.end);
4397
4398 let mut open_stack = Vec::new();
4399 let mut open_index = 0;
4400 for (open, close, index) in color_pairs {
4401 while open_index < opens.len() && opens[open_index].start < close.start {
4402 open_stack.push(opens[open_index].clone());
4403 open_index += 1;
4404 }
4405
4406 if open_stack.last() == Some(&open) {
4407 let depth_index = open_stack.len() - 1;
4408 all_brackets[index].color_index = Some(depth_index);
4409 open_stack.pop();
4410 }
4411 }
4412
4413 all_brackets.sort_by_key(|bracket_match| {
4414 (bracket_match.open_range.start, bracket_match.open_range.end)
4415 });
4416
4417 if let empty_slot @ None =
4418 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4419 {
4420 *empty_slot = Some(all_brackets.clone());
4421 }
4422 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4423 }
4424
4425 all_bracket_matches
4426 }
4427
4428 pub fn all_bracket_ranges(
4429 &self,
4430 range: Range<usize>,
4431 ) -> impl Iterator<Item = BracketMatch<usize>> {
4432 self.fetch_bracket_ranges(range.clone(), None)
4433 .into_values()
4434 .flatten()
4435 .filter(move |bracket_match| {
4436 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4437 bracket_range.overlaps(&range)
4438 })
4439 }
4440
4441 /// Returns bracket range pairs overlapping or adjacent to `range`
4442 pub fn bracket_ranges<T: ToOffset>(
4443 &self,
4444 range: Range<T>,
4445 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4446 // Find bracket pairs that *inclusively* contain the given range.
4447 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4448 self.all_bracket_ranges(range)
4449 .filter(|pair| !pair.newline_only)
4450 }
4451
4452 pub fn debug_variables_query<T: ToOffset>(
4453 &self,
4454 range: Range<T>,
4455 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4456 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4457
4458 let mut matches = self.syntax.matches_with_options(
4459 range.clone(),
4460 &self.text,
4461 TreeSitterOptions::default(),
4462 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4463 );
4464
4465 let configs = matches
4466 .grammars()
4467 .iter()
4468 .map(|grammar| grammar.debug_variables_config.as_ref())
4469 .collect::<Vec<_>>();
4470
4471 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4472
4473 iter::from_fn(move || {
4474 loop {
4475 while let Some(capture) = captures.pop() {
4476 if capture.0.overlaps(&range) {
4477 return Some(capture);
4478 }
4479 }
4480
4481 let mat = matches.peek()?;
4482
4483 let Some(config) = configs[mat.grammar_index].as_ref() else {
4484 matches.advance();
4485 continue;
4486 };
4487
4488 for capture in mat.captures {
4489 let Some(ix) = config
4490 .objects_by_capture_ix
4491 .binary_search_by_key(&capture.index, |e| e.0)
4492 .ok()
4493 else {
4494 continue;
4495 };
4496 let text_object = config.objects_by_capture_ix[ix].1;
4497 let byte_range = capture.node.byte_range();
4498
4499 let mut found = false;
4500 for (range, existing) in captures.iter_mut() {
4501 if existing == &text_object {
4502 range.start = range.start.min(byte_range.start);
4503 range.end = range.end.max(byte_range.end);
4504 found = true;
4505 break;
4506 }
4507 }
4508
4509 if !found {
4510 captures.push((byte_range, text_object));
4511 }
4512 }
4513
4514 matches.advance();
4515 }
4516 })
4517 }
4518
4519 pub fn text_object_ranges<T: ToOffset>(
4520 &self,
4521 range: Range<T>,
4522 options: TreeSitterOptions,
4523 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4524 let range =
4525 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4526
4527 let mut matches =
4528 self.syntax
4529 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4530 grammar.text_object_config.as_ref().map(|c| &c.query)
4531 });
4532
4533 let configs = matches
4534 .grammars()
4535 .iter()
4536 .map(|grammar| grammar.text_object_config.as_ref())
4537 .collect::<Vec<_>>();
4538
4539 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4540
4541 iter::from_fn(move || {
4542 loop {
4543 while let Some(capture) = captures.pop() {
4544 if capture.0.overlaps(&range) {
4545 return Some(capture);
4546 }
4547 }
4548
4549 let mat = matches.peek()?;
4550
4551 let Some(config) = configs[mat.grammar_index].as_ref() else {
4552 matches.advance();
4553 continue;
4554 };
4555
4556 for capture in mat.captures {
4557 let Some(ix) = config
4558 .text_objects_by_capture_ix
4559 .binary_search_by_key(&capture.index, |e| e.0)
4560 .ok()
4561 else {
4562 continue;
4563 };
4564 let text_object = config.text_objects_by_capture_ix[ix].1;
4565 let byte_range = capture.node.byte_range();
4566
4567 let mut found = false;
4568 for (range, existing) in captures.iter_mut() {
4569 if existing == &text_object {
4570 range.start = range.start.min(byte_range.start);
4571 range.end = range.end.max(byte_range.end);
4572 found = true;
4573 break;
4574 }
4575 }
4576
4577 if !found {
4578 captures.push((byte_range, text_object));
4579 }
4580 }
4581
4582 matches.advance();
4583 }
4584 })
4585 }
4586
4587 /// Returns enclosing bracket ranges containing the given range
4588 pub fn enclosing_bracket_ranges<T: ToOffset>(
4589 &self,
4590 range: Range<T>,
4591 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4592 let range = range.start.to_offset(self)..range.end.to_offset(self);
4593
4594 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4595 let max_depth = result
4596 .iter()
4597 .map(|mat| mat.syntax_layer_depth)
4598 .max()
4599 .unwrap_or(0);
4600 result.into_iter().filter(move |pair| {
4601 pair.open_range.start <= range.start
4602 && pair.close_range.end >= range.end
4603 && pair.syntax_layer_depth == max_depth
4604 })
4605 }
4606
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no bracket pair contains the range.
    ///
    /// An optional `range_filter` can be passed to filter which bracket ranges are considered.
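    ///
    /// A sketch (not a doctest) of finding the innermost pair around a cursor, assuming a
    /// `snapshot: &BufferSnapshot` and a byte `offset`:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     println!("open bracket {:?}, close bracket {:?}", open, close);
    /// }
    /// ```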
4610 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4611 &self,
4612 range: Range<T>,
4613 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4614 ) -> Option<(Range<usize>, Range<usize>)> {
4615 let range = range.start.to_offset(self)..range.end.to_offset(self);
4616
4617 // Get the ranges of the innermost pair of brackets.
4618 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4619
4620 for pair in self.enclosing_bracket_ranges(range) {
4621 if let Some(range_filter) = range_filter
4622 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4623 {
4624 continue;
4625 }
4626
4627 let len = pair.close_range.end - pair.open_range.start;
4628
4629 if let Some((existing_open, existing_close)) = &result {
4630 let existing_len = existing_close.end - existing_open.start;
4631 if len > existing_len {
4632 continue;
4633 }
4634 }
4635
4636 result = Some((pair.open_range, pair.close_range));
4637 }
4638
4639 result
4640 }
4641
4642 /// Returns anchor ranges for any matches of the redaction query.
4643 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4644 /// will be run on the relevant section of the buffer.
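    ///
    /// A sketch (not a doctest) that collects the redacted spans of the whole buffer,
    /// assuming a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut redacted: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// redacted.sort_by_key(|range| range.start);
    /// ```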
4645 pub fn redacted_ranges<T: ToOffset>(
4646 &self,
4647 range: Range<T>,
4648 ) -> impl Iterator<Item = Range<usize>> + '_ {
4649 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4650 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4651 grammar
4652 .redactions_config
4653 .as_ref()
4654 .map(|config| &config.query)
4655 });
4656
4657 let configs = syntax_matches
4658 .grammars()
4659 .iter()
4660 .map(|grammar| grammar.redactions_config.as_ref())
4661 .collect::<Vec<_>>();
4662
4663 iter::from_fn(move || {
4664 let redacted_range = syntax_matches
4665 .peek()
4666 .and_then(|mat| {
4667 configs[mat.grammar_index].and_then(|config| {
4668 mat.captures
4669 .iter()
4670 .find(|capture| capture.index == config.redaction_capture_ix)
4671 })
4672 })
4673 .map(|mat| mat.node.byte_range());
4674 syntax_matches.advance();
4675 redacted_range
4676 })
4677 }
4678
4679 pub fn injections_intersecting_range<T: ToOffset>(
4680 &self,
4681 range: Range<T>,
4682 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4683 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4684
4685 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4686 grammar
4687 .injection_config
4688 .as_ref()
4689 .map(|config| &config.query)
4690 });
4691
4692 let configs = syntax_matches
4693 .grammars()
4694 .iter()
4695 .map(|grammar| grammar.injection_config.as_ref())
4696 .collect::<Vec<_>>();
4697
4698 iter::from_fn(move || {
4699 let ranges = syntax_matches.peek().and_then(|mat| {
4700 let config = &configs[mat.grammar_index]?;
4701 let content_capture_range = mat.captures.iter().find_map(|capture| {
4702 if capture.index == config.content_capture_ix {
4703 Some(capture.node.byte_range())
4704 } else {
4705 None
4706 }
4707 })?;
4708 let language = self.language_at(content_capture_range.start)?;
4709 Some((content_capture_range, language))
4710 });
4711 syntax_matches.advance();
4712 ranges
4713 })
4714 }
4715
4716 pub fn runnable_ranges(
4717 &self,
4718 offset_range: Range<usize>,
4719 ) -> impl Iterator<Item = RunnableRange> + '_ {
4720 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4721 grammar.runnable_config.as_ref().map(|config| &config.query)
4722 });
4723
4724 let test_configs = syntax_matches
4725 .grammars()
4726 .iter()
4727 .map(|grammar| grammar.runnable_config.as_ref())
4728 .collect::<Vec<_>>();
4729
4730 iter::from_fn(move || {
4731 loop {
4732 let mat = syntax_matches.peek()?;
4733
4734 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4735 let mut run_range = None;
4736 let full_range = mat.captures.iter().fold(
4737 Range {
4738 start: usize::MAX,
4739 end: 0,
4740 },
4741 |mut acc, next| {
4742 let byte_range = next.node.byte_range();
4743 if acc.start > byte_range.start {
4744 acc.start = byte_range.start;
4745 }
4746 if acc.end < byte_range.end {
4747 acc.end = byte_range.end;
4748 }
4749 acc
4750 },
4751 );
4752 if full_range.start > full_range.end {
4753 // We did not find a full spanning range of this match.
4754 return None;
4755 }
4756 let extra_captures: SmallVec<[_; 1]> =
4757 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4758 test_configs
4759 .extra_captures
4760 .get(capture.index as usize)
4761 .cloned()
4762 .and_then(|tag_name| match tag_name {
4763 RunnableCapture::Named(name) => {
4764 Some((capture.node.byte_range(), name))
4765 }
4766 RunnableCapture::Run => {
4767 let _ = run_range.insert(capture.node.byte_range());
4768 None
4769 }
4770 })
4771 }));
4772 let run_range = run_range?;
4773 let tags = test_configs
4774 .query
4775 .property_settings(mat.pattern_index)
4776 .iter()
4777 .filter_map(|property| {
4778 if *property.key == *"tag" {
4779 property
4780 .value
4781 .as_ref()
4782 .map(|value| RunnableTag(value.to_string().into()))
4783 } else {
4784 None
4785 }
4786 })
4787 .collect();
4788 let extra_captures = extra_captures
4789 .into_iter()
4790 .map(|(range, name)| {
4791 (
4792 name.to_string(),
4793 self.text_for_range(range).collect::<String>(),
4794 )
4795 })
4796 .collect();
4797 // All tags should have the same range.
4798 Some(RunnableRange {
4799 run_range,
4800 full_range,
4801 runnable: Runnable {
4802 tags,
4803 language: mat.language,
4804 buffer: self.remote_id(),
4805 },
4806 extra_captures,
4807 buffer_id: self.remote_id(),
4808 })
4809 });
4810
4811 syntax_matches.advance();
4812 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()` returns `None`. But we don't want to end
                // this iterator just because a capture did not contain a run marker, so in that case
                // we loop around and try the next capture.
4815 return test_range;
4816 }
4817 }
4818 })
4819 }
4820
4821 /// Returns selections for remote peers intersecting the given range.
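    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot` and the `text`
    /// crate's `Anchor::MIN`/`Anchor::MAX` sentinels:
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     println!("replica {:?}: {} selections", replica_id, selections.count());
    /// }
    /// ```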
4822 #[allow(clippy::type_complexity)]
4823 pub fn selections_in_range(
4824 &self,
4825 range: Range<Anchor>,
4826 include_local: bool,
4827 ) -> impl Iterator<
4828 Item = (
4829 ReplicaId,
4830 bool,
4831 CursorShape,
4832 impl Iterator<Item = &Selection<Anchor>> + '_,
4833 ),
4834 > + '_ {
4835 self.remote_selections
4836 .iter()
4837 .filter(move |(replica_id, set)| {
4838 (include_local || **replica_id != self.text.replica_id())
4839 && !set.selections.is_empty()
4840 })
4841 .map(move |(replica_id, set)| {
4842 let start_ix = match set.selections.binary_search_by(|probe| {
4843 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4844 }) {
4845 Ok(ix) | Err(ix) => ix,
4846 };
4847 let end_ix = match set.selections.binary_search_by(|probe| {
4848 probe.start.cmp(&range.end, self).then(Ordering::Less)
4849 }) {
4850 Ok(ix) | Err(ix) => ix,
4851 };
4852
4853 (
4854 *replica_id,
4855 set.line_mode,
4856 set.cursor_shape,
4857 set.selections[start_ix..end_ix].iter(),
4858 )
4859 })
4860 }
4861
    /// Returns whether the buffer contains any diagnostics.
4863 pub fn has_diagnostics(&self) -> bool {
4864 !self.diagnostics.is_empty()
4865 }
4866
4867 /// Returns all the diagnostics intersecting the given range.
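    ///
    /// A sketch (not a doctest), collecting every error diagnostic in the buffer as an
    /// offset range, assuming a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```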
4868 pub fn diagnostics_in_range<'a, T, O>(
4869 &'a self,
4870 search_range: Range<T>,
4871 reversed: bool,
4872 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4873 where
4874 T: 'a + Clone + ToOffset,
4875 O: 'a + FromAnchor,
4876 {
4877 let mut iterators: Vec<_> = self
4878 .diagnostics
4879 .iter()
4880 .map(|(_, collection)| {
4881 collection
4882 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4883 .peekable()
4884 })
4885 .collect();
4886
4887 std::iter::from_fn(move || {
4888 let (next_ix, _) = iterators
4889 .iter_mut()
4890 .enumerate()
4891 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4892 .min_by(|(_, a), (_, b)| {
4893 let cmp = a
4894 .range
4895 .start
4896 .cmp(&b.range.start, self)
4897 // when range is equal, sort by diagnostic severity
4898 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4899 // and stabilize order with group_id
4900 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4901 if reversed { cmp.reverse() } else { cmp }
4902 })?;
4903 iterators[next_ix]
4904 .next()
4905 .map(
4906 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4907 diagnostic,
4908 range: FromAnchor::from_anchor(&range.start, self)
4909 ..FromAnchor::from_anchor(&range.end, self),
4910 },
4911 )
4912 })
4913 }
4914
4915 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4916 /// should be used instead.
4917 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4918 &self.diagnostics
4919 }
4920
4921 /// Returns all the diagnostic groups associated with the given
4922 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
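    ///
    /// A sketch (not a doctest), printing each group's primary diagnostic, assuming a
    /// `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {}", server_id, primary.diagnostic.message);
    /// }
    /// ```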
4924 pub fn diagnostic_groups(
4925 &self,
4926 language_server_id: Option<LanguageServerId>,
4927 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4928 let mut groups = Vec::new();
4929
4930 if let Some(language_server_id) = language_server_id {
4931 if let Ok(ix) = self
4932 .diagnostics
4933 .binary_search_by_key(&language_server_id, |e| e.0)
4934 {
4935 self.diagnostics[ix]
4936 .1
4937 .groups(language_server_id, &mut groups, self);
4938 }
4939 } else {
4940 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4941 diagnostics.groups(*language_server_id, &mut groups, self);
4942 }
4943 }
4944
4945 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4946 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4947 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4948 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4949 });
4950
4951 groups
4952 }
4953
4954 /// Returns an iterator over the diagnostics for the given group.
4955 pub fn diagnostic_group<O>(
4956 &self,
4957 group_id: usize,
4958 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4959 where
4960 O: FromAnchor + 'static,
4961 {
4962 self.diagnostics
4963 .iter()
4964 .flat_map(move |(_, set)| set.group(group_id, self))
4965 }
4966
4967 /// An integer version number that accounts for all updates besides
4968 /// the buffer's text itself (which is versioned via a version vector).
4969 pub fn non_text_state_update_count(&self) -> usize {
4970 self.non_text_state_update_count
4971 }
4972
4973 /// An integer version that changes when the buffer's syntax changes.
4974 pub fn syntax_update_count(&self) -> usize {
4975 self.syntax.update_count()
4976 }
4977
4978 /// Returns a snapshot of underlying file.
4979 pub fn file(&self) -> Option<&Arc<dyn File>> {
4980 self.file.as_ref()
4981 }
4982
4983 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4984 if let Some(file) = self.file() {
4985 if file.path().file_name().is_none() || include_root {
4986 Some(file.full_path(cx).to_string_lossy().into_owned())
4987 } else {
4988 Some(file.path().display(file.path_style(cx)).to_string())
4989 }
4990 } else {
4991 None
4992 }
4993 }
4994
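    /// Collects the words in the given buffer range, returning each word together with
    /// its anchor range. When [`WordsQuery::fuzzy_contents`] is set, only words that
    /// contain all of the query's characters (in order, case-insensitively) are returned.
    ///
    /// A sketch (not a doctest), assuming a `snapshot: &BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```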
4995 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4996 let query_str = query.fuzzy_contents;
4997 if query_str.is_some_and(|query| query.is_empty()) {
4998 return BTreeMap::default();
4999 }
5000
5001 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5002 language,
5003 override_id: None,
5004 }));
5005
5006 let mut query_ix = 0;
5007 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5008 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5009
5010 let mut words = BTreeMap::default();
5011 let mut current_word_start_ix = None;
5012 let mut chunk_ix = query.range.start;
5013 for chunk in self.chunks(query.range, false) {
5014 for (i, c) in chunk.text.char_indices() {
5015 let ix = chunk_ix + i;
5016 if classifier.is_word(c) {
5017 if current_word_start_ix.is_none() {
5018 current_word_start_ix = Some(ix);
5019 }
5020
5021 if let Some(query_chars) = &query_chars
5022 && query_ix < query_len
5023 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5024 {
5025 query_ix += 1;
5026 }
5027 continue;
5028 } else if let Some(word_start) = current_word_start_ix.take()
5029 && query_ix == query_len
5030 {
5031 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5032 let mut word_text = self.text_for_range(word_start..ix).peekable();
5033 let first_char = word_text
5034 .peek()
5035 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty words and words starting with digits, as a heuristic to reduce useless completions.
5037 if !query.skip_digits
5038 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5039 {
5040 words.insert(word_text.collect(), word_range);
5041 }
5042 }
5043 query_ix = 0;
5044 }
5045 chunk_ix += chunk.text.len();
5046 }
5047
5048 words
5049 }
5050}
5051
5052pub struct WordsQuery<'a> {
5053 /// Only returns words with all chars from the fuzzy string in them.
5054 pub fuzzy_contents: Option<&'a str>,
5055 /// Skips words that start with a digit.
5056 pub skip_digits: bool,
5057 /// Buffer offset range, to look for words.
5058 pub range: Range<usize>,
5059}
5060
5061fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5062 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5063}
5064
5065fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5066 let mut result = IndentSize::spaces(0);
5067 for c in text {
5068 let kind = match c {
5069 ' ' => IndentKind::Space,
5070 '\t' => IndentKind::Tab,
5071 _ => break,
5072 };
5073 if result.len == 0 {
5074 result.kind = kind;
5075 }
5076 result.len += 1;
5077 }
5078 result
5079}
5080
5081impl Clone for BufferSnapshot {
5082 fn clone(&self) -> Self {
5083 Self {
5084 text: self.text.clone(),
5085 syntax: self.syntax.clone(),
5086 file: self.file.clone(),
5087 remote_selections: self.remote_selections.clone(),
5088 diagnostics: self.diagnostics.clone(),
5089 language: self.language.clone(),
5090 tree_sitter_data: self.tree_sitter_data.clone(),
5091 non_text_state_update_count: self.non_text_state_update_count,
5092 }
5093 }
5094}
5095
5096impl Deref for BufferSnapshot {
5097 type Target = text::BufferSnapshot;
5098
5099 fn deref(&self) -> &Self::Target {
5100 &self.text
5101 }
5102}
5103
5104unsafe impl Send for BufferChunks<'_> {}
5105
5106impl<'a> BufferChunks<'a> {
5107 pub(crate) fn new(
5108 text: &'a Rope,
5109 range: Range<usize>,
5110 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5111 diagnostics: bool,
5112 buffer_snapshot: Option<&'a BufferSnapshot>,
5113 ) -> Self {
5114 let mut highlights = None;
5115 if let Some((captures, highlight_maps)) = syntax {
5116 highlights = Some(BufferChunkHighlights {
5117 captures,
5118 next_capture: None,
5119 stack: Default::default(),
5120 highlight_maps,
5121 })
5122 }
5123
5124 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5125 let chunks = text.chunks_in_range(range.clone());
5126
5127 let mut this = BufferChunks {
5128 range,
5129 buffer_snapshot,
5130 chunks,
5131 diagnostic_endpoints,
5132 error_depth: 0,
5133 warning_depth: 0,
5134 information_depth: 0,
5135 hint_depth: 0,
5136 unnecessary_depth: 0,
5137 underline: true,
5138 highlights,
5139 };
5140 this.initialize_diagnostic_endpoints();
5141 this
5142 }
5143
    /// Seeks to the given byte range in the buffer.
5145 pub fn seek(&mut self, range: Range<usize>) {
5146 let old_range = std::mem::replace(&mut self.range, range.clone());
5147 self.chunks.set_range(self.range.clone());
5148 if let Some(highlights) = self.highlights.as_mut() {
5149 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5150 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5151 highlights
5152 .stack
5153 .retain(|(end_offset, _)| *end_offset > range.start);
5154 if let Some(capture) = &highlights.next_capture
5155 && range.start >= capture.node.start_byte()
5156 {
5157 let next_capture_end = capture.node.end_byte();
5158 if range.start < next_capture_end {
5159 highlights.stack.push((
5160 next_capture_end,
5161 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5162 ));
5163 }
5164 highlights.next_capture.take();
5165 }
5166 } else if let Some(snapshot) = self.buffer_snapshot {
5167 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5168 *highlights = BufferChunkHighlights {
5169 captures,
5170 next_capture: None,
5171 stack: Default::default(),
5172 highlight_maps,
5173 };
5174 } else {
5175 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5176 // Seeking such BufferChunks is not supported.
5177 debug_assert!(
5178 false,
5179 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5180 );
5181 }
5182
5183 highlights.captures.set_byte_range(self.range.clone());
5184 self.initialize_diagnostic_endpoints();
5185 }
5186 }
5187
5188 fn initialize_diagnostic_endpoints(&mut self) {
5189 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5190 && let Some(buffer) = self.buffer_snapshot
5191 {
5192 let mut diagnostic_endpoints = Vec::new();
5193 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5194 diagnostic_endpoints.push(DiagnosticEndpoint {
5195 offset: entry.range.start,
5196 is_start: true,
5197 severity: entry.diagnostic.severity,
5198 is_unnecessary: entry.diagnostic.is_unnecessary,
5199 underline: entry.diagnostic.underline,
5200 });
5201 diagnostic_endpoints.push(DiagnosticEndpoint {
5202 offset: entry.range.end,
5203 is_start: false,
5204 severity: entry.diagnostic.severity,
5205 is_unnecessary: entry.diagnostic.is_unnecessary,
5206 underline: entry.diagnostic.underline,
5207 });
5208 }
5209 diagnostic_endpoints
5210 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5211 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5212 self.hint_depth = 0;
5213 self.error_depth = 0;
5214 self.warning_depth = 0;
5215 self.information_depth = 0;
5216 }
5217 }
5218
5219 /// The current byte offset in the buffer.
5220 pub fn offset(&self) -> usize {
5221 self.range.start
5222 }
5223
5224 pub fn range(&self) -> Range<usize> {
5225 self.range.clone()
5226 }
5227
5228 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5229 let depth = match endpoint.severity {
5230 DiagnosticSeverity::ERROR => &mut self.error_depth,
5231 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5232 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5233 DiagnosticSeverity::HINT => &mut self.hint_depth,
5234 _ => return,
5235 };
5236 if endpoint.is_start {
5237 *depth += 1;
5238 } else {
5239 *depth -= 1;
5240 }
5241
5242 if endpoint.is_unnecessary {
5243 if endpoint.is_start {
5244 self.unnecessary_depth += 1;
5245 } else {
5246 self.unnecessary_depth -= 1;
5247 }
5248 }
5249 }
5250
5251 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5252 if self.error_depth > 0 {
5253 Some(DiagnosticSeverity::ERROR)
5254 } else if self.warning_depth > 0 {
5255 Some(DiagnosticSeverity::WARNING)
5256 } else if self.information_depth > 0 {
5257 Some(DiagnosticSeverity::INFORMATION)
5258 } else if self.hint_depth > 0 {
5259 Some(DiagnosticSeverity::HINT)
5260 } else {
5261 None
5262 }
5263 }
5264
5265 fn current_code_is_unnecessary(&self) -> bool {
5266 self.unnecessary_depth > 0
5267 }
5268}
5269
5270impl<'a> Iterator for BufferChunks<'a> {
5271 type Item = Chunk<'a>;
5272
5273 fn next(&mut self) -> Option<Self::Item> {
5274 let mut next_capture_start = usize::MAX;
5275 let mut next_diagnostic_endpoint = usize::MAX;
5276
5277 if let Some(highlights) = self.highlights.as_mut() {
5278 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5279 if *parent_capture_end <= self.range.start {
5280 highlights.stack.pop();
5281 } else {
5282 break;
5283 }
5284 }
5285
5286 if highlights.next_capture.is_none() {
5287 highlights.next_capture = highlights.captures.next();
5288 }
5289
5290 while let Some(capture) = highlights.next_capture.as_ref() {
5291 if self.range.start < capture.node.start_byte() {
5292 next_capture_start = capture.node.start_byte();
5293 break;
5294 } else {
5295 let highlight_id =
5296 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5297 highlights
5298 .stack
5299 .push((capture.node.end_byte(), highlight_id));
5300 highlights.next_capture = highlights.captures.next();
5301 }
5302 }
5303 }
5304
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Shift the per-chunk tab and char bitmaps so that bit 0 lines up with
            // the start of `slice`, and mask off any bits past the end of the slice
            // so they cannot leak into the emitted chunk.
            let mask = 1u128
                .unbounded_shl((bit_end - bit_start) as u32)
                .wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns one that has been shrunk
    /// (`Ordering::Less`) or grown (`Ordering::Greater`) by the given size.
    /// Mismatched indent kinds are left unchanged, except that growing an
    /// empty indent adopts the given size outright.
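    ///
    /// A usage sketch (illustrative; marked `ignore` so it is not compiled as a
    /// doctest, and it assumes `IndentSize`'s `len` field is publicly readable):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a 4-space indent by another 4 spaces yields 8 spaces.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    ///
    /// // Shrinking by a tab has no effect, because the indent kinds differ.
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::tab());
    /// assert_eq!(indent.len, 8);
    /// ```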
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

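    /// The visual width of this indent, counting each tab as `tab_size` columns.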
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

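/// Coalesces an ascending sequence of row numbers into contiguous ranges,
/// splitting any run that would exceed `max_len` rows.
///
/// A sketch of the behavior (illustrative, not a doctest, since this function is
/// crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..6, 9..10]);
/// ```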
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

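/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-scope-specific word characters into account.
///
/// A usage sketch (illustrative; marked `ignore` so it is not compiled as a
/// doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_word('-')); // punctuation counts as a word character here
/// assert!(classifier.is_whitespace(' '));
/// ```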
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
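///
/// A sketch of the expected output (illustrative, not a doctest; it assumes
/// `Rope` can be built from a `&str`, as done elsewhere in this crate):
///
/// ```ignore
/// let rope = Rope::from("a \nb\t\t\nc");
/// // The space after `a` and the tabs after `b` are trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..6]);
/// ```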
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}