1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
82 /// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
128 /// Memoizes calls to `has_changes_since(saved_version)`.
129 /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// The ID provided by the dynamic registration that produced this diagnostic.
249 pub registration_id: Option<SharedString>,
250 /// A machine-readable code that identifies this diagnostic.
251 pub code: Option<NumberOrString>,
252 pub code_description: Option<lsp::Uri>,
253 /// Whether this diagnostic is a hint, warning, or error.
254 pub severity: DiagnosticSeverity,
255 /// The human-readable message associated with this diagnostic.
256 pub message: String,
257 /// The human-readable message (in markdown format)
258 pub markdown: Option<String>,
259 /// An id that identifies the group to which this diagnostic belongs.
260 ///
261 /// When a language server produces a diagnostic with
262 /// one or more associated diagnostics, those diagnostics are all
263 /// assigned a single group ID.
264 pub group_id: usize,
265 /// Whether this diagnostic is the primary diagnostic for its group.
266 ///
267 /// In a given group, the primary diagnostic is the top-level diagnostic
268 /// returned by the language server. The non-primary diagnostics are the
269 /// associated diagnostics.
270 pub is_primary: bool,
271 /// Whether this diagnostic is considered to originate from an analysis of
272 /// files on disk, as opposed to any unsaved buffer contents. This is a
273 /// property of a given diagnostic source, and is configured for a given
274 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
275 /// for the language server.
276 pub is_disk_based: bool,
277 /// Whether this diagnostic marks unnecessary code.
278 pub is_unnecessary: bool,
279 /// Allows quickly separating diagnostic groups by their source.
280 pub source_kind: DiagnosticSourceKind,
281 /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
282 pub data: Option<Value>,
283 /// Whether to underline the corresponding text range in the editor.
284 pub underline: bool,
285}
286
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
288pub enum DiagnosticSourceKind {
289 Pulled,
290 Pushed,
291 Other,
292}
293
294/// An operation used to synchronize this buffer with its other replicas.
295#[derive(Clone, Debug, PartialEq)]
296pub enum Operation {
297 /// A text operation.
298 Buffer(text::Operation),
299
300 /// An update to the buffer's diagnostics.
301 UpdateDiagnostics {
302 /// The id of the language server that produced the new diagnostics.
303 server_id: LanguageServerId,
304 /// The diagnostics.
305 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 },
309
310 /// An update to the most recent selections in this buffer.
311 UpdateSelections {
312 /// The selections.
313 selections: Arc<[Selection<Anchor>]>,
314 /// The buffer's lamport timestamp.
315 lamport_timestamp: clock::Lamport,
316 /// Whether the selections are in 'line mode'.
317 line_mode: bool,
318 /// The [`CursorShape`] associated with these selections.
319 cursor_shape: CursorShape,
320 },
321
322 /// An update to the characters that should trigger autocompletion
323 /// for this buffer.
324 UpdateCompletionTriggers {
325 /// The characters that trigger autocompletion.
326 triggers: Vec<String>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// The language server ID.
330 server_id: LanguageServerId,
331 },
332
333 /// An update to the line ending type of this buffer.
334 UpdateLineEnding {
335 /// The line ending type.
336 line_ending: LineEnding,
337 /// The buffer's lamport timestamp.
338 lamport_timestamp: clock::Lamport,
339 },
340}
341
342/// An event that occurs in a buffer.
343#[derive(Clone, Debug, PartialEq)]
344pub enum BufferEvent {
345 /// The buffer was changed in a way that must be
346 /// propagated to its other replicas.
347 Operation {
348 operation: Operation,
349 is_local: bool,
350 },
351 /// The buffer was edited.
352 Edited,
353 /// The buffer's `dirty` bit changed.
354 DirtyChanged,
355 /// The buffer was saved.
356 Saved,
357 /// The buffer's file was changed on disk.
358 FileHandleChanged,
359 /// The buffer was reloaded.
360 Reloaded,
361 /// The buffer is in need of a reload
362 ReloadNeeded,
363 /// The buffer's language was changed.
364 /// The boolean indicates whether the buffer previously had no language and now has one.
365 LanguageChanged(bool),
366 /// The buffer's syntax trees were updated.
367 Reparsed,
368 /// The buffer's diagnostics were updated.
369 DiagnosticsUpdated,
370 /// The buffer gained or lost editing capabilities.
371 CapabilityChanged,
372}
373
374/// The file associated with a buffer.
375pub trait File: Send + Sync + Any {
376 /// Returns the [`LocalFile`] associated with this file, if the
377 /// file is local.
378 fn as_local(&self) -> Option<&dyn LocalFile>;
379
380 /// Returns whether this file is local.
381 fn is_local(&self) -> bool {
382 self.as_local().is_some()
383 }
384
385 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
386 /// only available in some states, such as modification time.
387 fn disk_state(&self) -> DiskState;
388
389 /// Returns the path of this file relative to the worktree's root directory.
390 fn path(&self) -> &Arc<RelPath>;
391
392 /// Returns the path of this file relative to the worktree's parent directory (this means it
393 /// includes the name of the worktree's root folder).
394 fn full_path(&self, cx: &App) -> PathBuf;
395
396 /// Returns the path style of this file.
397 fn path_style(&self, cx: &App) -> PathStyle;
398
399 /// Returns the last component of this handle's absolute path. If this handle refers to the root
400 /// of its worktree, then this method will return the name of the worktree itself.
401 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
402
403 /// Returns the id of the worktree to which this file belongs.
404 ///
405 /// This is needed for looking up project-specific settings.
406 fn worktree_id(&self, cx: &App) -> WorktreeId;
407
408 /// Converts this file into a protobuf message.
409 fn to_proto(&self, cx: &App) -> rpc::proto::File;
410
411 /// Returns whether Zed considers this to be a private file.
412 fn is_private(&self) -> bool;
413}
414
415/// The file's storage status - whether it's stored (`Present`), and if so when it was last
416/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
417/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
418/// indicator for new files.
419#[derive(Copy, Clone, Debug, PartialEq)]
420pub enum DiskState {
421 /// File created in Zed that has not been saved.
422 New,
423 /// File present on the filesystem.
424 Present { mtime: MTime },
425 /// Deleted file that was previously present.
426 Deleted,
427}
428
429impl DiskState {
430 /// Returns the file's last known modification time on disk.
431 pub fn mtime(self) -> Option<MTime> {
432 match self {
433 DiskState::New => None,
434 DiskState::Present { mtime } => Some(mtime),
435 DiskState::Deleted => None,
436 }
437 }
438
439 pub fn exists(&self) -> bool {
440 match self {
441 DiskState::New => false,
442 DiskState::Present { .. } => true,
443 DiskState::Deleted => false,
444 }
445 }
446}
447
448/// The file associated with a buffer, in the case where the file is on the local disk.
449pub trait LocalFile: File {
450 /// Returns the absolute path of this file
451 fn abs_path(&self, cx: &App) -> PathBuf;
452
453 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
454 fn load(&self, cx: &App) -> Task<Result<String>>;
455
456 /// Loads the file's contents from disk.
457 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
458}
459
460/// The auto-indent behavior associated with an editing operation.
461/// For some editing operations, each affected line of text has its
462/// indentation recomputed. For other operations, the entire block
463/// of edited text is adjusted uniformly.
464#[derive(Clone, Debug)]
465pub enum AutoindentMode {
466 /// Indent each line of inserted text.
467 EachLine,
468 /// Apply the same indentation adjustment to all of the lines
469 /// in a given insertion.
470 Block {
471 /// The original indentation column of the first line of each
472 /// insertion, if it has been copied.
473 ///
474 /// Knowing this makes it possible to preserve the relative indentation
475 /// of every line in the insertion from when it was copied.
476 ///
477 /// If the original indent column is `a`, and the first line of the insertion
478 /// is then auto-indented to column `b`, then every other line of
479 /// the insertion will have its indentation adjusted by `b - a`.
480 original_indent_columns: Vec<Option<u32>>,
481 },
482}
483
484#[derive(Clone)]
485struct AutoindentRequest {
486 before_edit: BufferSnapshot,
487 entries: Vec<AutoindentRequestEntry>,
488 is_block_mode: bool,
489 ignore_empty_lines: bool,
490}
491
492#[derive(Debug, Clone)]
493struct AutoindentRequestEntry {
494 /// A range of the buffer whose indentation should be adjusted.
495 range: Range<Anchor>,
496 /// Whether or not these lines should be considered brand new, for the
497 /// purpose of auto-indent. When text is not new, its indentation will
498 /// only be adjusted if the suggested indentation level has *changed*
499 /// since the edit was made.
500 first_line_is_new: bool,
501 indent_size: IndentSize,
502 original_indent_column: Option<u32>,
503}
504
505#[derive(Debug)]
506struct IndentSuggestion {
507 basis_row: u32,
508 delta: Ordering,
509 within_error: bool,
510}
511
512struct BufferChunkHighlights<'a> {
513 captures: SyntaxMapCaptures<'a>,
514 next_capture: Option<SyntaxMapCapture<'a>>,
515 stack: Vec<(usize, HighlightId)>,
516 highlight_maps: Vec<HighlightMap>,
517}
518
519/// An iterator that yields chunks of a buffer's text, along with their
520/// syntax highlights and diagnostic status.
521pub struct BufferChunks<'a> {
522 buffer_snapshot: Option<&'a BufferSnapshot>,
523 range: Range<usize>,
524 chunks: text::Chunks<'a>,
525 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
526 error_depth: usize,
527 warning_depth: usize,
528 information_depth: usize,
529 hint_depth: usize,
530 unnecessary_depth: usize,
531 underline: bool,
532 highlights: Option<BufferChunkHighlights<'a>>,
533}
534
535/// A chunk of a buffer's text, along with its syntax highlight and
536/// diagnostic status.
537#[derive(Clone, Debug, Default)]
538pub struct Chunk<'a> {
539 /// The text of the chunk.
540 pub text: &'a str,
541 /// The syntax highlighting style of the chunk.
542 pub syntax_highlight_id: Option<HighlightId>,
543 /// The highlight style that has been applied to this chunk in
544 /// the editor.
545 pub highlight_style: Option<HighlightStyle>,
546 /// The severity of diagnostic associated with this chunk, if any.
547 pub diagnostic_severity: Option<DiagnosticSeverity>,
548 /// A bitset of which characters in this chunk are tabs.
549 pub tabs: u128,
550 /// A bitset of the character indices in this chunk.
551 pub chars: u128,
552 /// Whether this chunk of text is marked as unnecessary.
553 pub is_unnecessary: bool,
554 /// Whether this chunk of text was originally a tab character.
555 pub is_tab: bool,
556 /// Whether this chunk of text was originally an inlay.
557 pub is_inlay: bool,
558 /// Whether to underline the corresponding text range in the editor.
559 pub underline: bool,
560}
561
562/// A set of edits to a given version of a buffer, computed asynchronously.
563#[derive(Debug)]
564pub struct Diff {
565 pub base_version: clock::Global,
566 pub line_ending: LineEnding,
567 pub edits: Vec<(Range<usize>, Arc<str>)>,
568}
569
570#[derive(Debug, Clone, Copy)]
571pub(crate) struct DiagnosticEndpoint {
572 offset: usize,
573 is_start: bool,
574 underline: bool,
575 severity: DiagnosticSeverity,
576 is_unnecessary: bool,
577}
578
579/// A class of characters, used for characterizing a run of text.
580#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
581pub enum CharKind {
582 /// Whitespace.
583 Whitespace,
584 /// Punctuation.
585 Punctuation,
586 /// Word.
587 Word,
588}
589
590/// Context for character classification within a specific scope.
591#[derive(Copy, Clone, Eq, PartialEq, Debug)]
592pub enum CharScopeContext {
593 /// Character classification for completion queries.
594 ///
595 /// This context treats certain characters as word constituents that would
596 /// normally be considered punctuation, such as '-' in Tailwind classes
597 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
598 Completion,
599 /// Character classification for linked edits.
600 ///
601 /// This context handles characters that should be treated as part of
602 /// identifiers during linked editing operations, such as '.' in JSX
603 /// component names like `<Animated.View>`.
604 LinkedEdit,
605}
606
607/// A runnable is a set of data about a region that could be resolved into a task
608pub struct Runnable {
609 pub tags: SmallVec<[RunnableTag; 1]>,
610 pub language: Arc<Language>,
611 pub buffer: BufferId,
612}
613
614#[derive(Default, Clone, Debug)]
615pub struct HighlightedText {
616 pub text: SharedString,
617 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
618}
619
620#[derive(Default, Debug)]
621struct HighlightedTextBuilder {
622 pub text: String,
623 highlights: Vec<(Range<usize>, HighlightStyle)>,
624}
625
626impl HighlightedText {
627 pub fn from_buffer_range<T: ToOffset>(
628 range: Range<T>,
629 snapshot: &text::BufferSnapshot,
630 syntax_snapshot: &SyntaxSnapshot,
631 override_style: Option<HighlightStyle>,
632 syntax_theme: &SyntaxTheme,
633 ) -> Self {
634 let mut highlighted_text = HighlightedTextBuilder::default();
635 highlighted_text.add_text_from_buffer_range(
636 range,
637 snapshot,
638 syntax_snapshot,
639 override_style,
640 syntax_theme,
641 );
642 highlighted_text.build()
643 }
644
645 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
646 gpui::StyledText::new(self.text.clone())
647 .with_default_highlights(default_style, self.highlights.iter().cloned())
648 }
649
650 /// Returns the first line with its leading whitespace trimmed (unless a highlight
651 /// starts within that whitespace), and a boolean indicating whether more lines follow.
652 pub fn first_line_preview(self) -> (Self, bool) {
653 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
654 let first_line = &self.text[..newline_ix];
655
656 // Trim leading whitespace, unless an edit starts prior to it.
657 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
658 if let Some((first_highlight_range, _)) = self.highlights.first() {
659 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
660 }
661
662 let preview_text = &first_line[preview_start_ix..];
663 let preview_highlights = self
664 .highlights
665 .into_iter()
666 .skip_while(|(range, _)| range.end <= preview_start_ix)
667 .take_while(|(range, _)| range.start < newline_ix)
668 .filter_map(|(mut range, highlight)| {
669 range.start = range.start.saturating_sub(preview_start_ix);
670 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
671 if range.is_empty() {
672 None
673 } else {
674 Some((range, highlight))
675 }
676 });
677
678 let preview = Self {
679 text: SharedString::new(preview_text),
680 highlights: preview_highlights.collect(),
681 };
682
683 (preview, self.text.len() > newline_ix)
684 }
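// An illustrative sketch of the behavior above (hypothetical input): given
// highlighted text "    let x = 1;\nlet y = 2;" whose first highlight starts after
// the leading whitespace, the preview is the trimmed first line and the flag
// reports that more lines follow.
//
//     let (preview, has_more) = highlighted.first_line_preview();
//     // preview.text == "let x = 1;"
//     // has_more == true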
685}
686
687impl HighlightedTextBuilder {
688 pub fn build(self) -> HighlightedText {
689 HighlightedText {
690 text: self.text.into(),
691 highlights: self.highlights,
692 }
693 }
694
695 pub fn add_text_from_buffer_range<T: ToOffset>(
696 &mut self,
697 range: Range<T>,
698 snapshot: &text::BufferSnapshot,
699 syntax_snapshot: &SyntaxSnapshot,
700 override_style: Option<HighlightStyle>,
701 syntax_theme: &SyntaxTheme,
702 ) {
703 let range = range.to_offset(snapshot);
704 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
705 let start = self.text.len();
706 self.text.push_str(chunk.text);
707 let end = self.text.len();
708
709 if let Some(highlight_style) = chunk
710 .syntax_highlight_id
711 .and_then(|id| id.style(syntax_theme))
712 {
713 let highlight_style = override_style.map_or(highlight_style, |override_style| {
714 highlight_style.highlight(override_style)
715 });
716 self.highlights.push((start..end, highlight_style));
717 } else if let Some(override_style) = override_style {
718 self.highlights.push((start..end, override_style));
719 }
720 }
721 }
722
723 fn highlighted_chunks<'a>(
724 range: Range<usize>,
725 snapshot: &'a text::BufferSnapshot,
726 syntax_snapshot: &'a SyntaxSnapshot,
727 ) -> BufferChunks<'a> {
728 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
729 grammar
730 .highlights_config
731 .as_ref()
732 .map(|config| &config.query)
733 });
734
735 let highlight_maps = captures
736 .grammars()
737 .iter()
738 .map(|grammar| grammar.highlight_map())
739 .collect();
740
741 BufferChunks::new(
742 snapshot.as_rope(),
743 range,
744 Some((captures, highlight_maps)),
745 false,
746 None,
747 )
748 }
749}
750
751#[derive(Clone)]
752pub struct EditPreview {
753 old_snapshot: text::BufferSnapshot,
754 applied_edits_snapshot: text::BufferSnapshot,
755 syntax_snapshot: SyntaxSnapshot,
756}
757
758impl EditPreview {
759 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first.start.to_point(&self.old_snapshot);
764 let old_end = last.end.to_point(&self.old_snapshot);
765 let new_end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 let start = Point::new(start.row.saturating_sub(3), 0);
771 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
772 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
773
774 Some(unified_diff(
775 &self
776 .old_snapshot
777 .text_for_range(start..old_end)
778 .collect::<String>(),
779 &self
780 .applied_edits_snapshot
781 .text_for_range(start..new_end)
782 .collect::<String>(),
783 ))
784 }
785
786 pub fn highlight_edits(
787 &self,
788 current_snapshot: &BufferSnapshot,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 include_deletions: bool,
791 cx: &App,
792 ) -> HighlightedText {
793 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
794 return HighlightedText::default();
795 };
796
797 let mut highlighted_text = HighlightedTextBuilder::default();
798
799 let visible_range_in_preview_snapshot =
800 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
801 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
802
803 let insertion_highlight_style = HighlightStyle {
804 background_color: Some(cx.theme().status().created_background),
805 ..Default::default()
806 };
807 let deletion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().deleted_background),
809 ..Default::default()
810 };
811 let syntax_theme = cx.theme().syntax();
812
813 for (range, edit_text) in edits {
814 let edit_new_end_in_preview_snapshot = range
815 .end
816 .bias_right(&self.old_snapshot)
817 .to_offset(&self.applied_edits_snapshot);
818 let edit_start_in_preview_snapshot =
819 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
820
821 let unchanged_range_in_preview_snapshot =
822 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
823 if !unchanged_range_in_preview_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 unchanged_range_in_preview_snapshot,
826 &self.applied_edits_snapshot,
827 &self.syntax_snapshot,
828 None,
829 syntax_theme,
830 );
831 }
832
833 let range_in_current_snapshot = range.to_offset(current_snapshot);
834 if include_deletions && !range_in_current_snapshot.is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 range_in_current_snapshot,
837 &current_snapshot.text,
838 &current_snapshot.syntax,
839 Some(deletion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 if !edit_text.as_ref().is_empty() {
845 highlighted_text.add_text_from_buffer_range(
846 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
847 &self.applied_edits_snapshot,
848 &self.syntax_snapshot,
849 Some(insertion_highlight_style),
850 syntax_theme,
851 );
852 }
853
854 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
855 }
856
857 highlighted_text.add_text_from_buffer_range(
858 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
859 &self.applied_edits_snapshot,
860 &self.syntax_snapshot,
861 None,
862 syntax_theme,
863 );
864
865 highlighted_text.build()
866 }
867
868 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
869 cx.new(|cx| {
870 let mut buffer = Buffer::local_normalized(
871 self.applied_edits_snapshot.as_rope().clone(),
872 self.applied_edits_snapshot.line_ending(),
873 cx,
874 );
875 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
876 buffer
877 })
878 }
879
880 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
881 let (first, _) = edits.first()?;
882 let (last, _) = edits.last()?;
883
884 let start = first
885 .start
886 .bias_left(&self.old_snapshot)
887 .to_point(&self.applied_edits_snapshot);
888 let end = last
889 .end
890 .bias_right(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892
893 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
894 let range = Point::new(start.row, 0)
895 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
896
897 Some(range)
898 }
899}
900
901#[derive(Clone, Debug, PartialEq, Eq)]
902pub struct BracketMatch<T> {
903 pub open_range: Range<T>,
904 pub close_range: Range<T>,
905 pub newline_only: bool,
906 pub syntax_layer_depth: usize,
907 pub color_index: Option<usize>,
908}
909
910impl<T> BracketMatch<T> {
911 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
912 (self.open_range, self.close_range)
913 }
914}
915
916impl Buffer {
917 /// Create a new buffer with the given base text.
918 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
919 Self::build(
920 TextBuffer::new(
921 ReplicaId::LOCAL,
922 cx.entity_id().as_non_zero_u64().into(),
923 base_text.into(),
924 ),
925 None,
926 Capability::ReadWrite,
927 )
928 }
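// A minimal usage sketch (assuming `cx: &mut gpui::App`, e.g. in a test): buffers
// are constructed inside `cx.new` so that a `Context<Buffer>` is available.
//
//     let buffer = cx.new(|cx| Buffer::local("hello world", cx));
//     assert_eq!(buffer.read(cx).text(), "hello world");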
929
930 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
931 pub fn local_normalized(
932 base_text_normalized: Rope,
933 line_ending: LineEnding,
934 cx: &Context<Self>,
935 ) -> Self {
936 Self::build(
937 TextBuffer::new_normalized(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 line_ending,
941 base_text_normalized,
942 ),
943 None,
944 Capability::ReadWrite,
945 )
946 }
947
948 /// Create a new buffer that is a replica of a remote buffer.
949 pub fn remote(
950 remote_id: BufferId,
951 replica_id: ReplicaId,
952 capability: Capability,
953 base_text: impl Into<String>,
954 ) -> Self {
955 Self::build(
956 TextBuffer::new(replica_id, remote_id, base_text.into()),
957 None,
958 capability,
959 )
960 }
961
962 /// Create a new buffer that is a replica of a remote buffer, populating its
963 /// state from the given protobuf message.
964 pub fn from_proto(
965 replica_id: ReplicaId,
966 capability: Capability,
967 message: proto::BufferState,
968 file: Option<Arc<dyn File>>,
969 ) -> Result<Self> {
970 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
971 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
972 let mut this = Self::build(buffer, file, capability);
973 this.text.set_line_ending(proto::deserialize_line_ending(
974 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
975 ));
976 this.saved_version = proto::deserialize_version(&message.saved_version);
977 this.saved_mtime = message.saved_mtime.map(|time| time.into());
978 Ok(this)
979 }
980
981 /// Serialize the buffer's state to a protobuf message.
982 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
983 proto::BufferState {
984 id: self.remote_id().into(),
985 file: self.file.as_ref().map(|f| f.to_proto(cx)),
986 base_text: self.base_text().to_string(),
987 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
988 saved_version: proto::serialize_version(&self.saved_version),
989 saved_mtime: self.saved_mtime.map(|time| time.into()),
990 }
991 }
992
993 /// Serialize as protobufs all of the changes to the buffer since the given version.
994 pub fn serialize_ops(
995 &self,
996 since: Option<clock::Global>,
997 cx: &App,
998 ) -> Task<Vec<proto::Operation>> {
999 let mut operations = Vec::new();
1000 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1001
1002 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1003 proto::serialize_operation(&Operation::UpdateSelections {
1004 selections: set.selections.clone(),
1005 lamport_timestamp: set.lamport_timestamp,
1006 line_mode: set.line_mode,
1007 cursor_shape: set.cursor_shape,
1008 })
1009 }));
1010
1011 for (server_id, diagnostics) in &self.diagnostics {
1012 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1013 lamport_timestamp: self.diagnostics_timestamp,
1014 server_id: *server_id,
1015 diagnostics: diagnostics.iter().cloned().collect(),
1016 }));
1017 }
1018
1019 for (server_id, completions) in &self.completion_triggers_per_language_server {
1020 operations.push(proto::serialize_operation(
1021 &Operation::UpdateCompletionTriggers {
1022 triggers: completions.iter().cloned().collect(),
1023 lamport_timestamp: self.completion_triggers_timestamp,
1024 server_id: *server_id,
1025 },
1026 ));
1027 }
1028
1029 let text_operations = self.text.operations().clone();
1030 cx.background_spawn(async move {
1031 let since = since.unwrap_or_default();
1032 operations.extend(
1033 text_operations
1034 .iter()
1035 .filter(|(_, op)| !since.observed(op.timestamp()))
1036 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1037 );
1038 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1039 operations
1040 })
1041 }
1042
1043 /// Assign a language to the buffer, returning the buffer.
1044 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1045 self.set_language_async(Some(language), cx);
1046 self
1047 }
1048
1049 /// Assign a language to the buffer, blocking for up to 1ms to reparse it, and return the buffer.
1050 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1051 self.set_language(Some(language), cx);
1052 self
1053 }
1054
1055 /// Returns the [`Capability`] of this buffer.
1056 pub fn capability(&self) -> Capability {
1057 self.capability
1058 }
1059
1060 /// Whether this buffer can only be read.
1061 pub fn read_only(&self) -> bool {
1062 self.capability == Capability::ReadOnly
1063 }
1064
1065 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1066 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1067 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1068 let snapshot = buffer.snapshot();
1069 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1070 let tree_sitter_data = TreeSitterData::new(snapshot);
1071 Self {
1072 saved_mtime,
1073 tree_sitter_data: Arc::new(tree_sitter_data),
1074 saved_version: buffer.version(),
1075 preview_version: buffer.version(),
1076 reload_task: None,
1077 transaction_depth: 0,
1078 was_dirty_before_starting_transaction: None,
1079 has_unsaved_edits: Cell::new((buffer.version(), false)),
1080 text: buffer,
1081 branch_state: None,
1082 file,
1083 capability,
1084 syntax_map,
1085 reparse: None,
1086 non_text_state_update_count: 0,
1087 sync_parse_timeout: Duration::from_millis(1),
1088 parse_status: watch::channel(ParseStatus::Idle),
1089 autoindent_requests: Default::default(),
1090 wait_for_autoindent_txs: Default::default(),
1091 pending_autoindent: Default::default(),
1092 language: None,
1093 remote_selections: Default::default(),
1094 diagnostics: Default::default(),
1095 diagnostics_timestamp: Lamport::MIN,
1096 completion_triggers: Default::default(),
1097 completion_triggers_per_language_server: Default::default(),
1098 completion_triggers_timestamp: Lamport::MIN,
1099 deferred_ops: OperationQueue::new(),
1100 has_conflict: false,
1101 change_bits: Default::default(),
1102 _subscriptions: Vec::new(),
1103 }
1104 }
1105
1106 pub fn build_snapshot(
1107 text: Rope,
1108 language: Option<Arc<Language>>,
1109 language_registry: Option<Arc<LanguageRegistry>>,
1110 cx: &mut App,
1111 ) -> impl Future<Output = BufferSnapshot> + use<> {
1112 let entity_id = cx.reserve_entity::<Self>().entity_id();
1113 let buffer_id = entity_id.as_non_zero_u64().into();
1114 async move {
1115 let text =
1116 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1117 .snapshot();
1118 let mut syntax = SyntaxMap::new(&text).snapshot();
1119 if let Some(language) = language.clone() {
1120 let language_registry = language_registry.clone();
1121 syntax.reparse(&text, language_registry, language);
1122 }
1123 let tree_sitter_data = TreeSitterData::new(text.clone());
1124 BufferSnapshot {
1125 text,
1126 syntax,
1127 file: None,
1128 diagnostics: Default::default(),
1129 remote_selections: Default::default(),
1130 tree_sitter_data: Arc::new(tree_sitter_data),
1131 language,
1132 non_text_state_update_count: 0,
1133 }
1134 }
1135 }
1136
1137 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1138 let entity_id = cx.reserve_entity::<Self>().entity_id();
1139 let buffer_id = entity_id.as_non_zero_u64().into();
1140 let text = TextBuffer::new_normalized(
1141 ReplicaId::LOCAL,
1142 buffer_id,
1143 Default::default(),
1144 Rope::new(),
1145 )
1146 .snapshot();
1147 let syntax = SyntaxMap::new(&text).snapshot();
1148 let tree_sitter_data = TreeSitterData::new(text.clone());
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(tree_sitter_data),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language: None,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 #[cfg(any(test, feature = "test-support"))]
1162 pub fn build_snapshot_sync(
1163 text: Rope,
1164 language: Option<Arc<Language>>,
1165 language_registry: Option<Arc<LanguageRegistry>>,
1166 cx: &mut App,
1167 ) -> BufferSnapshot {
1168 let entity_id = cx.reserve_entity::<Self>().entity_id();
1169 let buffer_id = entity_id.as_non_zero_u64().into();
1170 let text =
1171 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1172 .snapshot();
1173 let mut syntax = SyntaxMap::new(&text).snapshot();
1174 if let Some(language) = language.clone() {
1175 syntax.reparse(&text, language_registry, language);
1176 }
1177 let tree_sitter_data = TreeSitterData::new(text.clone());
1178 BufferSnapshot {
1179 text,
1180 syntax,
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 file: None,
1183 diagnostics: Default::default(),
1184 remote_selections: Default::default(),
1185 language,
1186 non_text_state_update_count: 0,
1187 }
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's current state. This is computationally
1191 /// cheap, and allows reading from the buffer on a background thread.
1192 pub fn snapshot(&self) -> BufferSnapshot {
1193 let text = self.text.snapshot();
1194 let mut syntax_map = self.syntax_map.lock();
1195 syntax_map.interpolate(&text);
1196 let syntax = syntax_map.snapshot();
1197
1198 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1199 Arc::new(TreeSitterData::new(text.clone()))
1200 } else {
1201 self.tree_sitter_data.clone()
1202 };
1203
1204 BufferSnapshot {
1205 text,
1206 syntax,
1207 tree_sitter_data,
1208 file: self.file.clone(),
1209 remote_selections: self.remote_selections.clone(),
1210 diagnostics: self.diagnostics.clone(),
1211 language: self.language.clone(),
1212 non_text_state_update_count: self.non_text_state_update_count,
1213 }
1214 }
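// A minimal sketch of the pattern enabled by `snapshot` (assuming `cx: &mut App`):
// the snapshot is cheap to take and can be moved to a background task while the
// buffer keeps changing on the main thread.
//
//     let snapshot = buffer.read(cx).snapshot();
//     let word_count = cx.background_spawn(async move {
//         snapshot.text().split_whitespace().count()
//     });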
1215
1216 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1217 let this = cx.entity();
1218 cx.new(|cx| {
1219 let mut branch = Self {
1220 branch_state: Some(BufferBranchState {
1221 base_buffer: this.clone(),
1222 merged_operations: Default::default(),
1223 }),
1224 language: self.language.clone(),
1225 has_conflict: self.has_conflict,
1226 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1227 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1228 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1229 };
1230 if let Some(language_registry) = self.language_registry() {
1231 branch.set_language_registry(language_registry);
1232 }
1233
1234 // Reparse the branch buffer so that we get syntax highlighting immediately.
1235 branch.reparse(cx, true);
1236
1237 branch
1238 })
1239 }
1240
1241 pub fn preview_edits(
1242 &self,
1243 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1244 cx: &App,
1245 ) -> Task<EditPreview> {
1246 let registry = self.language_registry();
1247 let language = self.language().cloned();
1248 let old_snapshot = self.text.snapshot();
1249 let mut branch_buffer = self.text.branch();
1250 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1251 cx.background_spawn(async move {
1252 if !edits.is_empty() {
1253 if let Some(language) = language.clone() {
1254 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1255 }
1256
1257 branch_buffer.edit(edits.iter().cloned());
1258 let snapshot = branch_buffer.snapshot();
1259 syntax_snapshot.interpolate(&snapshot);
1260
1261 if let Some(language) = language {
1262 syntax_snapshot.reparse(&snapshot, registry, language);
1263 }
1264 }
1265 EditPreview {
1266 old_snapshot,
1267 applied_edits_snapshot: branch_buffer.snapshot(),
1268 syntax_snapshot,
1269 }
1270 })
1271 }
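// A minimal sketch of the preview flow (assuming an async context and a prepared
// `edits: Arc<[(Range<Anchor>, Arc<str>)]>`): the preview is computed on a
// background thread, then rendered with insertions (and optionally deletions)
// highlighted.
//
//     let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
//     // ...later, after awaiting the task:
//     let preview = preview_task.await;
//     let snapshot = buffer.read(cx).snapshot();
//     let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);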
1272
1273 /// Applies all of the changes in this buffer that intersect any of the
1274 /// given `ranges` to its base buffer.
1275 ///
1276 /// If `ranges` is empty, then all changes will be applied. This buffer must
1277 /// be a branch buffer to call this method.
1278 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1279 let Some(base_buffer) = self.base_buffer() else {
1280 debug_panic!("not a branch buffer");
1281 return;
1282 };
1283
1284 let mut ranges = if ranges.is_empty() {
1285 &[0..usize::MAX]
1286 } else {
1287 ranges.as_slice()
1288 }
1289 .iter()
1290 .peekable();
1291
1292 let mut edits = Vec::new();
1293 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1294 let mut is_included = false;
1295 while let Some(range) = ranges.peek() {
1296 if range.end < edit.new.start {
1297 ranges.next().unwrap();
1298 } else {
1299 if range.start <= edit.new.end {
1300 is_included = true;
1301 }
1302 break;
1303 }
1304 }
1305
1306 if is_included {
1307 edits.push((
1308 edit.old.clone(),
1309 self.text_for_range(edit.new.clone()).collect::<String>(),
1310 ));
1311 }
1312 }
1313
1314 let operation = base_buffer.update(cx, |base_buffer, cx| {
1315 // cx.emit(BufferEvent::DiffBaseChanged);
1316 base_buffer.edit(edits, None, cx)
1317 });
1318
1319 if let Some(operation) = operation
1320 && let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 {
1324 merged_operations.push(operation);
1325 }
1326 }
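// A minimal usage sketch (hypothetical edit): create a branch of `base`, edit the
// branch, then fold the edits back into the base buffer. An empty `ranges` vec
// merges every change.
//
//     let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
//     branch.update(cx, |buffer, cx| {
//         buffer.edit([(0..0, "// prologue\n")], None, cx);
//         buffer.merge_into_base(Vec::new(), cx);
//     });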
1327
1328 fn on_base_buffer_event(
1329 &mut self,
1330 _: Entity<Buffer>,
1331 event: &BufferEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 let BufferEvent::Operation { operation, .. } = event else {
1335 return;
1336 };
1337 let Some(BufferBranchState {
1338 merged_operations, ..
1339 }) = &mut self.branch_state
1340 else {
1341 return;
1342 };
1343
1344 let mut operation_to_undo = None;
1345 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1346 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1347 {
1348 merged_operations.remove(ix);
1349 operation_to_undo = Some(operation.timestamp);
1350 }
1351
1352 self.apply_ops([operation.clone()], cx);
1353
1354 if let Some(timestamp) = operation_to_undo {
1355 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1356 self.undo_operations(counts, cx);
1357 }
1358 }
1359
1360 #[cfg(test)]
1361 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1362 &self.text
1363 }
1364
1365 /// Retrieve a snapshot of the buffer's raw text, without any
1366 /// language-related state like the syntax tree or diagnostics.
1367 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1368 self.text.snapshot()
1369 }
1370
1371 /// The file associated with the buffer, if any.
1372 pub fn file(&self) -> Option<&Arc<dyn File>> {
1373 self.file.as_ref()
1374 }
1375
1376 /// The version of the buffer that was last saved or reloaded from disk.
1377 pub fn saved_version(&self) -> &clock::Global {
1378 &self.saved_version
1379 }
1380
1381 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1382 pub fn saved_mtime(&self) -> Option<MTime> {
1383 self.saved_mtime
1384 }
1385
1386 /// Assign a language to the buffer.
1387 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1388 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1389 }
1390
1391 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1392 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1393 self.set_language_(language, true, cx);
1394 }
1395
1396 fn set_language_(
1397 &mut self,
1398 language: Option<Arc<Language>>,
1399 may_block: bool,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.non_text_state_update_count += 1;
1403 self.syntax_map.lock().clear(&self.text);
1404 let old_language = std::mem::replace(&mut self.language, language);
1405 self.was_changed();
1406 self.reparse(cx, may_block);
1407 let has_fresh_language =
1408 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1409 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1410 }
1411
1412 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1413 /// other languages if parts of the buffer are written in different languages.
1414 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1415 self.syntax_map
1416 .lock()
1417 .set_language_registry(language_registry);
1418 }
1419
1420 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1421 self.syntax_map.lock().language_registry()
1422 }
1423
1424 /// Assign the line ending type to the buffer.
1425 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1426 self.text.set_line_ending(line_ending);
1427
1428 let lamport_timestamp = self.text.lamport_clock.tick();
1429 self.send_operation(
1430 Operation::UpdateLineEnding {
1431 line_ending,
1432 lamport_timestamp,
1433 },
1434 true,
1435 cx,
1436 );
1437 }
1438
1439 /// Assign the buffer a new [`Capability`].
1440 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1441 if self.capability != capability {
1442 self.capability = capability;
1443 cx.emit(BufferEvent::CapabilityChanged)
1444 }
1445 }
1446
1447 /// This method is called to signal that the buffer has been saved.
1448 pub fn did_save(
1449 &mut self,
1450 version: clock::Global,
1451 mtime: Option<MTime>,
1452 cx: &mut Context<Self>,
1453 ) {
1454 self.saved_version = version.clone();
1455 self.has_unsaved_edits.set((version, false));
1456 self.has_conflict = false;
1457 self.saved_mtime = mtime;
1458 self.was_changed();
1459 cx.emit(BufferEvent::Saved);
1460 cx.notify();
1461 }
1462
1463 /// Reloads the contents of the buffer from disk.
1464 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1465 let (tx, rx) = futures::channel::oneshot::channel();
1466 let prev_version = self.text.version();
1467 self.reload_task = Some(cx.spawn(async move |this, cx| {
1468 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1469 let file = this.file.as_ref()?.as_local()?;
1470
1471 Some((file.disk_state().mtime(), file.load(cx)))
1472 })?
1473 else {
1474 return Ok(());
1475 };
1476
1477 let new_text = new_text.await?;
1478 let diff = this
1479 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1480 .await;
1481 this.update(cx, |this, cx| {
1482 if this.version() == diff.base_version {
1483 this.finalize_last_transaction();
1484 this.apply_diff(diff, cx);
1485 tx.send(this.finalize_last_transaction().cloned()).ok();
1486 this.has_conflict = false;
1487 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1488 } else {
1489 if !diff.edits.is_empty()
1490 || this
1491 .edits_since::<usize>(&diff.base_version)
1492 .next()
1493 .is_some()
1494 {
1495 this.has_conflict = true;
1496 }
1497
1498 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1499 }
1500
1501 this.reload_task.take();
1502 })
1503 }));
1504 rx
1505 }
1506
1507 /// This method is called to signal that the buffer has been reloaded.
1508 pub fn did_reload(
1509 &mut self,
1510 version: clock::Global,
1511 line_ending: LineEnding,
1512 mtime: Option<MTime>,
1513 cx: &mut Context<Self>,
1514 ) {
1515 self.saved_version = version;
1516 self.has_unsaved_edits
1517 .set((self.saved_version.clone(), false));
1518 self.text.set_line_ending(line_ending);
1519 self.saved_mtime = mtime;
1520 cx.emit(BufferEvent::Reloaded);
1521 cx.notify();
1522 }
1523
1524 /// Updates the [`File`] backing this buffer. This should be called when
1525 /// the file has changed or has been deleted.
1526 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1527 let was_dirty = self.is_dirty();
1528 let mut file_changed = false;
1529
1530 if let Some(old_file) = self.file.as_ref() {
1531 if new_file.path() != old_file.path() {
1532 file_changed = true;
1533 }
1534
1535 let old_state = old_file.disk_state();
1536 let new_state = new_file.disk_state();
1537 if old_state != new_state {
1538 file_changed = true;
1539 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1540 cx.emit(BufferEvent::ReloadNeeded)
1541 }
1542 }
1543 } else {
1544 file_changed = true;
1545 };
1546
1547 self.file = Some(new_file);
1548 if file_changed {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 if was_dirty != self.is_dirty() {
1552 cx.emit(BufferEvent::DirtyChanged);
1553 }
1554 cx.emit(BufferEvent::FileHandleChanged);
1555 cx.notify();
1556 }
1557 }
1558
1559 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1560 Some(self.branch_state.as_ref()?.base_buffer.clone())
1561 }
1562
1563 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1564 pub fn language(&self) -> Option<&Arc<Language>> {
1565 self.language.as_ref()
1566 }
1567
1568 /// Returns the [`Language`] at the given location.
1569 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1570 let offset = position.to_offset(self);
1571 let mut is_first = true;
1572 let start_anchor = self.anchor_before(offset);
1573 let end_anchor = self.anchor_after(offset);
1574 self.syntax_map
1575 .lock()
1576 .layers_for_range(offset..offset, &self.text, false)
1577 .filter(|layer| {
1578 if is_first {
1579 is_first = false;
1580 return true;
1581 }
1582
1583 layer
1584 .included_sub_ranges
1585 .map(|sub_ranges| {
1586 sub_ranges.iter().any(|sub_range| {
1587 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1588 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1589 !is_before_start && !is_after_end
1590 })
1591 })
1592 .unwrap_or(true)
1593 })
1594 .last()
1595 .map(|info| info.language.clone())
1596 .or_else(|| self.language.clone())
1597 }
1598
1599 /// Returns each [`Language`] for the active syntax layers at the given location.
1600 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1601 let offset = position.to_offset(self);
1602 let mut languages: Vec<Arc<Language>> = self
1603 .syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .map(|info| info.language.clone())
1607 .collect();
1608
1609 if languages.is_empty()
1610 && let Some(buffer_language) = self.language()
1611 {
1612 languages.push(buffer_language.clone());
1613 }
1614
1615 languages
1616 }
1617
1618 /// An integer version number that accounts for all updates besides
1619 /// the buffer's text itself (which is versioned via a version vector).
1620 pub fn non_text_state_update_count(&self) -> usize {
1621 self.non_text_state_update_count
1622 }
1623
1624 /// Whether the buffer is being parsed in the background.
1625 #[cfg(any(test, feature = "test-support"))]
1626 pub fn is_parsing(&self) -> bool {
1627 self.reparse.is_some()
1628 }
1629
1630 /// Indicates whether the buffer contains any regions that may be
1631 /// written in a language that hasn't been loaded yet.
1632 pub fn contains_unknown_injections(&self) -> bool {
1633 self.syntax_map.lock().contains_unknown_injections()
1634 }
1635
1636 #[cfg(any(test, feature = "test-support"))]
1637 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1638 self.sync_parse_timeout = timeout;
1639 }
1640
1641 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1642 match Arc::get_mut(&mut self.tree_sitter_data) {
1643 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1644 None => {
1645 let tree_sitter_data = TreeSitterData::new(snapshot);
1646 self.tree_sitter_data = Arc::new(tree_sitter_data)
1647 }
1648 }
1649 }
1650
1651 /// Called after an edit to synchronize the buffer's main parse tree with
1652 /// the buffer's new underlying state.
1653 ///
1654 /// Locks the syntax map and interpolates the edits since the last reparse
1655 /// into the foreground syntax tree.
1656 ///
1657 /// Then takes a stable snapshot of the syntax map before unlocking it.
1658 /// The snapshot with the interpolated edits is sent to a background thread,
1659 /// where we ask Tree-sitter to perform an incremental parse.
1660 ///
1661 /// Meanwhile, in the foreground, if `may_block` is true we block the main
1662 /// thread for up to 1ms waiting on the parse to complete. If it finishes
1663 /// within that window, we proceed synchronously.
1664 ///
1665 /// If we time out waiting on the parse, we return with the interpolated tree
1666 /// still in the foreground and spawn a second task that waits for the parse
1667 /// to complete. When the background parse completes, it calls back into
1668 /// the main thread and assigns the newly parsed syntax state.
1669 ///
1670 /// If the buffer or grammar changed since the start of the background parse,
1671 /// initiate an additional reparse recursively. To avoid concurrent parses
1672 /// for the same buffer, we only initiate a new parse if we are not already
1673 /// parsing in the background.
1674 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1675 if self.text.version() != *self.tree_sitter_data.version() {
1676 self.invalidate_tree_sitter_data(self.text.snapshot());
1677 }
1678 if self.reparse.is_some() {
1679 return;
1680 }
1681 let language = if let Some(language) = self.language.clone() {
1682 language
1683 } else {
1684 return;
1685 };
1686
1687 let text = self.text_snapshot();
1688 let parsed_version = self.version();
1689
1690 let mut syntax_map = self.syntax_map.lock();
1691 syntax_map.interpolate(&text);
1692 let language_registry = syntax_map.language_registry();
1693 let mut syntax_snapshot = syntax_map.snapshot();
1694 drop(syntax_map);
1695
1696 let parse_task = cx.background_spawn({
1697 let language = language.clone();
1698 let language_registry = language_registry.clone();
1699 async move {
1700 syntax_snapshot.reparse(&text, language_registry, language);
1701 syntax_snapshot
1702 }
1703 });
1704
1705 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1706 if may_block {
1707 match cx
1708 .background_executor()
1709 .block_with_timeout(self.sync_parse_timeout, parse_task)
1710 {
1711 Ok(new_syntax_snapshot) => {
1712 self.did_finish_parsing(new_syntax_snapshot, cx);
1713 self.reparse = None;
1714 }
1715 Err(parse_task) => {
1716 self.reparse = Some(cx.spawn(async move |this, cx| {
1717 let new_syntax_map = cx.background_spawn(parse_task).await;
1718 this.update(cx, move |this, cx| {
1719 let grammar_changed = || {
1720 this.language.as_ref().is_none_or(|current_language| {
1721 !Arc::ptr_eq(&language, current_language)
1722 })
1723 };
1724 let language_registry_changed = || {
1725 new_syntax_map.contains_unknown_injections()
1726 && language_registry.is_some_and(|registry| {
1727 registry.version()
1728 != new_syntax_map.language_registry_version()
1729 })
1730 };
1731 let parse_again = this.version.changed_since(&parsed_version)
1732 || language_registry_changed()
1733 || grammar_changed();
1734 this.did_finish_parsing(new_syntax_map, cx);
1735 this.reparse = None;
1736 if parse_again {
1737 this.reparse(cx, false);
1738 }
1739 })
1740 .ok();
1741 }));
1742 }
1743 }
1744 } else {
1745 self.reparse = Some(cx.spawn(async move |this, cx| {
1746 let new_syntax_map = cx.background_spawn(parse_task).await;
1747 this.update(cx, move |this, cx| {
1748 let grammar_changed = || {
1749 this.language.as_ref().is_none_or(|current_language| {
1750 !Arc::ptr_eq(&language, current_language)
1751 })
1752 };
1753 let language_registry_changed = || {
1754 new_syntax_map.contains_unknown_injections()
1755 && language_registry.is_some_and(|registry| {
1756 registry.version() != new_syntax_map.language_registry_version()
1757 })
1758 };
1759 let parse_again = this.version.changed_since(&parsed_version)
1760 || language_registry_changed()
1761 || grammar_changed();
1762 this.did_finish_parsing(new_syntax_map, cx);
1763 this.reparse = None;
1764 if parse_again {
1765 this.reparse(cx, false);
1766 }
1767 })
1768 .ok();
1769 }));
1770 }
1771 }
1772
1773 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1774 self.was_changed();
1775 self.non_text_state_update_count += 1;
1776 self.syntax_map.lock().did_parse(syntax_snapshot);
1777 self.request_autoindent(cx);
1778 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1779 if self.text.version() != *self.tree_sitter_data.version() {
1780 self.invalidate_tree_sitter_data(self.text.snapshot());
1781 }
1782 cx.emit(BufferEvent::Reparsed);
1783 cx.notify();
1784 }
1785
1786 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1787 self.parse_status.1.clone()
1788 }
1789
    /// Returns a future that resolves once the buffer is no longer being parsed.
1791 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1792 let mut parse_status = self.parse_status();
1793 async move {
1794 while *parse_status.borrow() != ParseStatus::Idle {
1795 if parse_status.changed().await.is_err() {
1796 break;
1797 }
1798 }
1799 }
1800 }
1801
1802 /// Assign to the buffer a set of diagnostics created by a given language server.
1803 pub fn update_diagnostics(
1804 &mut self,
1805 server_id: LanguageServerId,
1806 diagnostics: DiagnosticSet,
1807 cx: &mut Context<Self>,
1808 ) {
1809 let lamport_timestamp = self.text.lamport_clock.tick();
1810 let op = Operation::UpdateDiagnostics {
1811 server_id,
1812 diagnostics: diagnostics.iter().cloned().collect(),
1813 lamport_timestamp,
1814 };
1815
1816 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1817 self.send_operation(op, true, cx);
1818 }
1819
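    /// Returns the diagnostics stored for the given language server, or the
    /// diagnostics of all servers when `for_server` is `None`.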
1820 pub fn buffer_diagnostics(
1821 &self,
1822 for_server: Option<LanguageServerId>,
1823 ) -> Vec<&DiagnosticEntry<Anchor>> {
1824 match for_server {
1825 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1826 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1827 Err(_) => Vec::new(),
1828 },
1829 None => self
1830 .diagnostics
1831 .iter()
1832 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1833 .collect(),
1834 }
1835 }
1836
1837 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1838 if let Some(indent_sizes) = self.compute_autoindents() {
1839 let indent_sizes = cx.background_spawn(indent_sizes);
1840 match cx
1841 .background_executor()
1842 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1843 {
1844 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1845 Err(indent_sizes) => {
1846 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1847 let indent_sizes = indent_sizes.await;
1848 this.update(cx, |this, cx| {
1849 this.apply_autoindents(indent_sizes, cx);
1850 })
1851 .ok();
1852 }));
1853 }
1854 }
1855 } else {
1856 self.autoindent_requests.clear();
1857 for tx in self.wait_for_autoindent_txs.drain(..) {
1858 tx.send(()).ok();
1859 }
1860 }
1861 }
1862
1863 fn compute_autoindents(
1864 &self,
1865 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1866 let max_rows_between_yields = 100;
1867 let snapshot = self.snapshot();
1868 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1869 return None;
1870 }
1871
1872 let autoindent_requests = self.autoindent_requests.clone();
1873 Some(async move {
1874 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1875 for request in autoindent_requests {
1876 // Resolve each edited range to its row in the current buffer and in the
1877 // buffer before this batch of edits.
1878 let mut row_ranges = Vec::new();
1879 let mut old_to_new_rows = BTreeMap::new();
1880 let mut language_indent_sizes_by_new_row = Vec::new();
1881 for entry in &request.entries {
1882 let position = entry.range.start;
1883 let new_row = position.to_point(&snapshot).row;
1884 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1885 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1886
1887 if !entry.first_line_is_new {
1888 let old_row = position.to_point(&request.before_edit).row;
1889 old_to_new_rows.insert(old_row, new_row);
1890 }
1891 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1892 }
1893
1894 // Build a map containing the suggested indentation for each of the edited lines
1895 // with respect to the state of the buffer before these edits. This map is keyed
1896 // by the rows for these lines in the current state of the buffer.
1897 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1898 let old_edited_ranges =
1899 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1900 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1901 let mut language_indent_size = IndentSize::default();
1902 for old_edited_range in old_edited_ranges {
1903 let suggestions = request
1904 .before_edit
1905 .suggest_autoindents(old_edited_range.clone())
1906 .into_iter()
1907 .flatten();
1908 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1909 if let Some(suggestion) = suggestion {
1910 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1911
1912 // Find the indent size based on the language for this row.
1913 while let Some((row, size)) = language_indent_sizes.peek() {
1914 if *row > new_row {
1915 break;
1916 }
1917 language_indent_size = *size;
1918 language_indent_sizes.next();
1919 }
1920
1921 let suggested_indent = old_to_new_rows
1922 .get(&suggestion.basis_row)
1923 .and_then(|from_row| {
1924 Some(old_suggestions.get(from_row).copied()?.0)
1925 })
1926 .unwrap_or_else(|| {
1927 request
1928 .before_edit
1929 .indent_size_for_line(suggestion.basis_row)
1930 })
1931 .with_delta(suggestion.delta, language_indent_size);
1932 old_suggestions
1933 .insert(new_row, (suggested_indent, suggestion.within_error));
1934 }
1935 }
1936 yield_now().await;
1937 }
1938
1939 // Compute new suggestions for each line, but only include them in the result
1940 // if they differ from the old suggestion for that line.
1941 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1942 let mut language_indent_size = IndentSize::default();
1943 for (row_range, original_indent_column) in row_ranges {
1944 let new_edited_row_range = if request.is_block_mode {
1945 row_range.start..row_range.start + 1
1946 } else {
1947 row_range.clone()
1948 };
1949
1950 let suggestions = snapshot
1951 .suggest_autoindents(new_edited_row_range.clone())
1952 .into_iter()
1953 .flatten();
1954 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1955 if let Some(suggestion) = suggestion {
1956 // Find the indent size based on the language for this row.
1957 while let Some((row, size)) = language_indent_sizes.peek() {
1958 if *row > new_row {
1959 break;
1960 }
1961 language_indent_size = *size;
1962 language_indent_sizes.next();
1963 }
1964
1965 let suggested_indent = indent_sizes
1966 .get(&suggestion.basis_row)
1967 .copied()
1968 .map(|e| e.0)
1969 .unwrap_or_else(|| {
1970 snapshot.indent_size_for_line(suggestion.basis_row)
1971 })
1972 .with_delta(suggestion.delta, language_indent_size);
1973
1974 if old_suggestions.get(&new_row).is_none_or(
1975 |(old_indentation, was_within_error)| {
1976 suggested_indent != *old_indentation
1977 && (!suggestion.within_error || *was_within_error)
1978 },
1979 ) {
1980 indent_sizes.insert(
1981 new_row,
1982 (suggested_indent, request.ignore_empty_lines),
1983 );
1984 }
1985 }
1986 }
1987
1988 if let (true, Some(original_indent_column)) =
1989 (request.is_block_mode, original_indent_column)
1990 {
1991 let new_indent =
1992 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1993 *indent
1994 } else {
1995 snapshot.indent_size_for_line(row_range.start)
1996 };
1997 let delta = new_indent.len as i64 - original_indent_column as i64;
1998 if delta != 0 {
1999 for row in row_range.skip(1) {
2000 indent_sizes.entry(row).or_insert_with(|| {
2001 let mut size = snapshot.indent_size_for_line(row);
2002 if size.kind == new_indent.kind {
2003 match delta.cmp(&0) {
2004 Ordering::Greater => size.len += delta as u32,
2005 Ordering::Less => {
2006 size.len = size.len.saturating_sub(-delta as u32)
2007 }
2008 Ordering::Equal => {}
2009 }
2010 }
2011 (size, request.ignore_empty_lines)
2012 });
2013 }
2014 }
2015 }
2016
2017 yield_now().await;
2018 }
2019 }
2020
2021 indent_sizes
2022 .into_iter()
2023 .filter_map(|(row, (indent, ignore_empty_lines))| {
2024 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2025 None
2026 } else {
2027 Some((row, indent))
2028 }
2029 })
2030 .collect()
2031 })
2032 }
2033
2034 fn apply_autoindents(
2035 &mut self,
2036 indent_sizes: BTreeMap<u32, IndentSize>,
2037 cx: &mut Context<Self>,
2038 ) {
2039 self.autoindent_requests.clear();
2040 for tx in self.wait_for_autoindent_txs.drain(..) {
2041 tx.send(()).ok();
2042 }
2043
2044 let edits: Vec<_> = indent_sizes
2045 .into_iter()
2046 .filter_map(|(row, indent_size)| {
2047 let current_size = indent_size_for_line(self, row);
2048 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2049 })
2050 .collect();
2051
2052 let preserve_preview = self.preserve_preview();
2053 self.edit(edits, None, cx);
2054 if preserve_preview {
2055 self.refresh_preview();
2056 }
2057 }
2058
2059 /// Create a minimal edit that will cause the given row to be indented
2060 /// with the given size. After applying this edit, the length of the line
2061 /// will always be at least `new_size.len`.
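    ///
    /// A rough sketch of the expected edits (illustrative only, not a doc-test):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let grow = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(grow, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    ///
    /// // Shrinking removes the excess columns from the start of the line.
    /// let shrink = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(4),
    ///     IndentSize::spaces(2),
    /// );
    /// assert_eq!(shrink, Some((Point::new(3, 0)..Point::new(3, 2), String::new())));
    /// ```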
2062 pub fn edit_for_indent_size_adjustment(
2063 row: u32,
2064 current_size: IndentSize,
2065 new_size: IndentSize,
2066 ) -> Option<(Range<Point>, String)> {
2067 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2069 Ordering::Greater => {
2070 let point = Point::new(row, 0);
2071 Some((
2072 point..point,
2073 iter::repeat(new_size.char())
2074 .take((new_size.len - current_size.len) as usize)
2075 .collect::<String>(),
2076 ))
2077 }
2078
2079 Ordering::Less => Some((
2080 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2081 String::new(),
2082 )),
2083
2084 Ordering::Equal => None,
2085 }
2086 } else {
2087 Some((
2088 Point::new(row, 0)..Point::new(row, current_size.len),
2089 iter::repeat(new_size.char())
2090 .take(new_size.len as usize)
2091 .collect::<String>(),
2092 ))
2093 }
2094 }
2095
2096 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2097 /// and the given new text.
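    ///
    /// A rough usage sketch, paired with [`Buffer::apply_diff`] (illustrative only):
    ///
    /// ```ignore
    /// // `buffer` is an Entity<Buffer>; compute the diff in the background,
    /// // then apply it once it resolves.
    /// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```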
2098 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2099 let old_text = self.as_rope().clone();
2100 let base_version = self.version();
2101 cx.background_executor()
2102 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2103 let old_text = old_text.to_string();
2104 let line_ending = LineEnding::detect(&new_text);
2105 LineEnding::normalize(&mut new_text);
2106 let edits = text_diff(&old_text, &new_text);
2107 Diff {
2108 base_version,
2109 line_ending,
2110 edits,
2111 }
2112 })
2113 }
2114
2115 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2117 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2118 let old_text = self.as_rope().clone();
2119 let line_ending = self.line_ending();
2120 let base_version = self.version();
2121 cx.background_spawn(async move {
2122 let ranges = trailing_whitespace_ranges(&old_text);
2123 let empty = Arc::<str>::from("");
2124 Diff {
2125 base_version,
2126 line_ending,
2127 edits: ranges
2128 .into_iter()
2129 .map(|range| (range, empty.clone()))
2130 .collect(),
2131 }
2132 })
2133 }
2134
2135 /// Ensures that the buffer ends with a single newline character, and
2136 /// no other whitespace. Skips if the buffer is empty.
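    ///
    /// Intended effect (illustrative only):
    ///
    /// ```ignore
    /// // "fn main() {}"          => "fn main() {}\n"
    /// // "fn main() {}  \n\n\t"  => "fn main() {}\n"
    /// // ""                      => ""   (left unchanged)
    /// ```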
2137 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2138 let len = self.len();
2139 if len == 0 {
2140 return;
2141 }
2142 let mut offset = len;
2143 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2144 let non_whitespace_len = chunk
2145 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2146 .len();
2147 offset -= chunk.len();
2148 offset += non_whitespace_len;
2149 if non_whitespace_len != 0 {
2150 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2151 return;
2152 }
2153 break;
2154 }
2155 }
2156 self.edit([(offset..len, "\n")], None, cx);
2157 }
2158
2159 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2160 /// calculated, then adjust the diff to account for those changes, and discard any
2161 /// parts of the diff that conflict with those changes.
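    ///
    /// A rough worked example of the adjustment (offsets are illustrative):
    ///
    /// ```ignore
    /// // A hunk replaces bytes 10..14, computed against `diff.base_version`.
    /// // Since then, 3 bytes were inserted at offset 2, strictly before the
    /// // hunk, so the hunk is shifted by +3 and applied at 13..17.
    /// // Had an intervening edit overlapped 10..14, that hunk would be dropped.
    /// ```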
2162 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2163 let snapshot = self.snapshot();
2164 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2165 let mut delta = 0;
2166 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2167 while let Some(edit_since) = edits_since.peek() {
2168 // If the edit occurs after a diff hunk, then it does not
2169 // affect that hunk.
2170 if edit_since.old.start > range.end {
2171 break;
2172 }
2173 // If the edit precedes the diff hunk, then adjust the hunk
2174 // to reflect the edit.
2175 else if edit_since.old.end < range.start {
2176 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2177 edits_since.next();
2178 }
2179 // If the edit intersects a diff hunk, then discard that hunk.
2180 else {
2181 return None;
2182 }
2183 }
2184
2185 let start = (range.start as i64 + delta) as usize;
2186 let end = (range.end as i64 + delta) as usize;
2187 Some((start..end, new_text))
2188 });
2189
2190 self.start_transaction();
2191 self.text.set_line_ending(diff.line_ending);
2192 self.edit(adjusted_edits, None, cx);
2193 self.end_transaction(cx)
2194 }
2195
2196 pub fn has_unsaved_edits(&self) -> bool {
2197 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2198
2199 if last_version == self.version {
2200 self.has_unsaved_edits
2201 .set((last_version, has_unsaved_edits));
2202 return has_unsaved_edits;
2203 }
2204
2205 let has_edits = self.has_edits_since(&self.saved_version);
2206 self.has_unsaved_edits
2207 .set((self.version.clone(), has_edits));
2208 has_edits
2209 }
2210
2211 /// Checks if the buffer has unsaved changes.
2212 pub fn is_dirty(&self) -> bool {
2213 if self.capability == Capability::ReadOnly {
2214 return false;
2215 }
2216 if self.has_conflict {
2217 return true;
2218 }
2219 match self.file.as_ref().map(|f| f.disk_state()) {
2220 Some(DiskState::New) | Some(DiskState::Deleted) => {
2221 !self.is_empty() && self.has_unsaved_edits()
2222 }
2223 _ => self.has_unsaved_edits(),
2224 }
2225 }
2226
2227 /// Marks the buffer as having a conflict regardless of current buffer state.
2228 pub fn set_conflict(&mut self) {
2229 self.has_conflict = true;
2230 }
2231
2232 /// Checks if the buffer and its file have both changed since the buffer
2233 /// was last saved or reloaded.
2234 pub fn has_conflict(&self) -> bool {
2235 if self.has_conflict {
2236 return true;
2237 }
2238 let Some(file) = self.file.as_ref() else {
2239 return false;
2240 };
2241 match file.disk_state() {
2242 DiskState::New => false,
2243 DiskState::Present { mtime } => match self.saved_mtime {
2244 Some(saved_mtime) => {
2245 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2246 }
2247 None => true,
2248 },
2249 DiskState::Deleted => false,
2250 }
2251 }
2252
2253 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2254 pub fn subscribe(&mut self) -> Subscription<usize> {
2255 self.text.subscribe()
2256 }
2257
2258 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2259 ///
2260 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
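    ///
    /// A rough usage sketch (the strong `Rc` is owned by the caller; illustrative only):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    ///
    /// // ...later, poll the bit without waiting for an effect cycle:
    /// if changed.replace(false) {
    ///     // the buffer's text has changed since the last poll
    /// }
    /// ```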
2262 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2263 if let Err(ix) = self
2264 .change_bits
2265 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2266 {
2267 self.change_bits.insert(ix, bit);
2268 }
2269 }
2270
2271 /// Set the change bit for all "listeners".
2272 fn was_changed(&mut self) {
2273 self.change_bits.retain(|change_bit| {
2274 change_bit
2275 .upgrade()
2276 .inspect(|bit| {
2277 _ = bit.replace(true);
2278 })
2279 .is_some()
2280 });
2281 }
2282
2283 /// Starts a transaction, if one is not already in-progress. When undoing or
2284 /// redoing edits, all of the edits performed within a transaction are undone
2285 /// or redone together.
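    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```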
2286 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2287 self.start_transaction_at(Instant::now())
2288 }
2289
2290 /// Starts a transaction, providing the current time. Subsequent transactions
2291 /// that occur within a short period of time will be grouped together. This
2292 /// is controlled by the buffer's undo grouping duration.
2293 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2294 self.transaction_depth += 1;
2295 if self.was_dirty_before_starting_transaction.is_none() {
2296 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2297 }
2298 self.text.start_transaction_at(now)
2299 }
2300
2301 /// Terminates the current transaction, if this is the outermost transaction.
2302 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2303 self.end_transaction_at(Instant::now(), cx)
2304 }
2305
2306 /// Terminates the current transaction, providing the current time. Subsequent transactions
2307 /// that occur within a short period of time will be grouped together. This
2308 /// is controlled by the buffer's undo grouping duration.
2309 pub fn end_transaction_at(
2310 &mut self,
2311 now: Instant,
2312 cx: &mut Context<Self>,
2313 ) -> Option<TransactionId> {
2314 assert!(self.transaction_depth > 0);
2315 self.transaction_depth -= 1;
2316 let was_dirty = if self.transaction_depth == 0 {
2317 self.was_dirty_before_starting_transaction.take().unwrap()
2318 } else {
2319 false
2320 };
2321 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2322 self.did_edit(&start_version, was_dirty, cx);
2323 Some(transaction_id)
2324 } else {
2325 None
2326 }
2327 }
2328
2329 /// Manually add a transaction to the buffer's undo history.
2330 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2331 self.text.push_transaction(transaction, now);
2332 }
2333
2334 /// Differs from `push_transaction` in that it does not clear the redo
2335 /// stack. Intended to be used to create a parent transaction to merge
2336 /// potential child transactions into.
2337 ///
2338 /// The caller is responsible for removing it from the undo history using
2339 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2340 /// are merged into this transaction, the caller is responsible for ensuring
2341 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2342 /// cleared is to create transactions with the usual `start_transaction` and
2343 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
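    ///
    /// A rough sketch of that pattern (illustrative only):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "x")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```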
2345 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2346 self.text.push_empty_transaction(now)
2347 }
2348
2349 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2351 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2352 self.text.finalize_last_transaction()
2353 }
2354
2355 /// Manually group all changes since a given transaction.
2356 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2357 self.text.group_until_transaction(transaction_id);
2358 }
2359
2360 /// Manually remove a transaction from the buffer's undo history
2361 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2362 self.text.forget_transaction(transaction_id)
2363 }
2364
2365 /// Retrieve a transaction from the buffer's undo history
2366 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2367 self.text.get_transaction(transaction_id)
2368 }
2369
2370 /// Manually merge two transactions in the buffer's undo history.
2371 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2372 self.text.merge_transactions(transaction, destination);
2373 }
2374
2375 /// Waits for the buffer to receive operations with the given timestamps.
2376 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2377 &mut self,
2378 edit_ids: It,
2379 ) -> impl Future<Output = Result<()>> + use<It> {
2380 self.text.wait_for_edits(edit_ids)
2381 }
2382
2383 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2384 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2385 &mut self,
2386 anchors: It,
2387 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2388 self.text.wait_for_anchors(anchors)
2389 }
2390
2391 /// Waits for the buffer to receive operations up to the given version.
2392 pub fn wait_for_version(
2393 &mut self,
2394 version: clock::Global,
2395 ) -> impl Future<Output = Result<()>> + use<> {
2396 self.text.wait_for_version(version)
2397 }
2398
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2401 pub fn give_up_waiting(&mut self) {
2402 self.text.give_up_waiting();
2403 }
2404
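    /// Returns a receiver that resolves once the pending autoindent requests
    /// have been applied, or `None` if no autoindent is pending.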
2405 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2406 let mut rx = None;
2407 if !self.autoindent_requests.is_empty() {
2408 let channel = oneshot::channel();
2409 self.wait_for_autoindent_txs.push(channel.0);
2410 rx = Some(channel.1);
2411 }
2412 rx
2413 }
2414
2415 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2416 pub fn set_active_selections(
2417 &mut self,
2418 selections: Arc<[Selection<Anchor>]>,
2419 line_mode: bool,
2420 cursor_shape: CursorShape,
2421 cx: &mut Context<Self>,
2422 ) {
2423 let lamport_timestamp = self.text.lamport_clock.tick();
2424 self.remote_selections.insert(
2425 self.text.replica_id(),
2426 SelectionSet {
2427 selections: selections.clone(),
2428 lamport_timestamp,
2429 line_mode,
2430 cursor_shape,
2431 },
2432 );
2433 self.send_operation(
2434 Operation::UpdateSelections {
2435 selections,
2436 line_mode,
2437 lamport_timestamp,
2438 cursor_shape,
2439 },
2440 true,
2441 cx,
2442 );
2443 self.non_text_state_update_count += 1;
2444 cx.notify();
2445 }
2446
2447 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2448 /// this replica.
2449 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2450 if self
2451 .remote_selections
2452 .get(&self.text.replica_id())
2453 .is_none_or(|set| !set.selections.is_empty())
2454 {
2455 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2456 }
2457 }
2458
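    /// Stores a set of selections attributed to the agent. Unlike
    /// [`Buffer::set_active_selections`], this does not broadcast an operation
    /// to other replicas.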
2459 pub fn set_agent_selections(
2460 &mut self,
2461 selections: Arc<[Selection<Anchor>]>,
2462 line_mode: bool,
2463 cursor_shape: CursorShape,
2464 cx: &mut Context<Self>,
2465 ) {
2466 let lamport_timestamp = self.text.lamport_clock.tick();
2467 self.remote_selections.insert(
2468 ReplicaId::AGENT,
2469 SelectionSet {
2470 selections,
2471 lamport_timestamp,
2472 line_mode,
2473 cursor_shape,
2474 },
2475 );
2476 self.non_text_state_update_count += 1;
2477 cx.notify();
2478 }
2479
2480 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2481 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2482 }
2483
2484 /// Replaces the buffer's entire text.
2485 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2486 where
2487 T: Into<Arc<str>>,
2488 {
2489 self.autoindent_requests.clear();
2490 self.edit([(0..self.len(), text)], None, cx)
2491 }
2492
2493 /// Appends the given text to the end of the buffer.
2494 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2495 where
2496 T: Into<Arc<str>>,
2497 {
2498 self.edit([(self.len()..self.len(), text)], None, cx)
2499 }
2500
2501 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2502 /// delete, and a string of text to insert at that location.
2503 ///
2504 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2505 /// request for the edited ranges, which will be processed when the buffer finishes
2506 /// parsing.
2507 ///
    /// Parsing takes place at the end of a transaction, and may complete
    /// synchronously or asynchronously, depending on how long the parse takes.
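    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// // Insert a line at the end of the buffer and auto-indent it.
    /// let end = buffer.len();
    /// buffer.edit(
    ///     [(end..end, "\nvalue")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```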
2510 pub fn edit<I, S, T>(
2511 &mut self,
2512 edits_iter: I,
2513 autoindent_mode: Option<AutoindentMode>,
2514 cx: &mut Context<Self>,
2515 ) -> Option<clock::Lamport>
2516 where
2517 I: IntoIterator<Item = (Range<S>, T)>,
2518 S: ToOffset,
2519 T: Into<Arc<str>>,
2520 {
2521 // Skip invalid edits and coalesce contiguous ones.
2522 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2523
2524 for (range, new_text) in edits_iter {
2525 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2526
2527 if range.start > range.end {
2528 mem::swap(&mut range.start, &mut range.end);
2529 }
2530 let new_text = new_text.into();
2531 if !new_text.is_empty() || !range.is_empty() {
2532 if let Some((prev_range, prev_text)) = edits.last_mut()
2533 && prev_range.end >= range.start
2534 {
2535 prev_range.end = cmp::max(prev_range.end, range.end);
2536 *prev_text = format!("{prev_text}{new_text}").into();
2537 } else {
2538 edits.push((range, new_text));
2539 }
2540 }
2541 }
2542 if edits.is_empty() {
2543 return None;
2544 }
2545
2546 self.start_transaction();
2547 self.pending_autoindent.take();
2548 let autoindent_request = autoindent_mode
2549 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2550
2551 let edit_operation = self.text.edit(edits.iter().cloned());
2552 let edit_id = edit_operation.timestamp();
2553
2554 if let Some((before_edit, mode)) = autoindent_request {
2555 let mut delta = 0isize;
2556 let mut previous_setting = None;
2557 let entries: Vec<_> = edits
2558 .into_iter()
2559 .enumerate()
2560 .zip(&edit_operation.as_edit().unwrap().new_text)
2561 .filter(|((_, (range, _)), _)| {
2562 let language = before_edit.language_at(range.start);
2563 let language_id = language.map(|l| l.id());
2564 if let Some((cached_language_id, auto_indent)) = previous_setting
2565 && cached_language_id == language_id
2566 {
2567 auto_indent
2568 } else {
2569 // The auto-indent setting is not present in editorconfigs, hence
2570 // we can avoid passing the file here.
2571 let auto_indent =
2572 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2573 previous_setting = Some((language_id, auto_indent));
2574 auto_indent
2575 }
2576 })
2577 .map(|((ix, (range, _)), new_text)| {
2578 let new_text_length = new_text.len();
2579 let old_start = range.start.to_point(&before_edit);
2580 let new_start = (delta + range.start as isize) as usize;
2581 let range_len = range.end - range.start;
2582 delta += new_text_length as isize - range_len as isize;
2583
2584 // Decide what range of the insertion to auto-indent, and whether
2585 // the first line of the insertion should be considered a newly-inserted line
2586 // or an edit to an existing line.
2587 let mut range_of_insertion_to_indent = 0..new_text_length;
2588 let mut first_line_is_new = true;
2589
2590 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2591 let old_line_end = before_edit.line_len(old_start.row);
2592
2593 if old_start.column > old_line_start {
2594 first_line_is_new = false;
2595 }
2596
2597 if !new_text.contains('\n')
2598 && (old_start.column + (range_len as u32) < old_line_end
2599 || old_line_end == old_line_start)
2600 {
2601 first_line_is_new = false;
2602 }
2603
2604 // When inserting text starting with a newline, avoid auto-indenting the
2605 // previous line.
2606 if new_text.starts_with('\n') {
2607 range_of_insertion_to_indent.start += 1;
2608 first_line_is_new = true;
2609 }
2610
2611 let mut original_indent_column = None;
2612 if let AutoindentMode::Block {
2613 original_indent_columns,
2614 } = &mode
2615 {
2616 original_indent_column = Some(if new_text.starts_with('\n') {
2617 indent_size_for_text(
2618 new_text[range_of_insertion_to_indent.clone()].chars(),
2619 )
2620 .len
2621 } else {
2622 original_indent_columns
2623 .get(ix)
2624 .copied()
2625 .flatten()
2626 .unwrap_or_else(|| {
2627 indent_size_for_text(
2628 new_text[range_of_insertion_to_indent.clone()].chars(),
2629 )
2630 .len
2631 })
2632 });
2633
2634 // Avoid auto-indenting the line after the edit.
2635 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2636 range_of_insertion_to_indent.end -= 1;
2637 }
2638 }
2639
2640 AutoindentRequestEntry {
2641 first_line_is_new,
2642 original_indent_column,
2643 indent_size: before_edit.language_indent_size_at(range.start, cx),
2644 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2645 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2646 }
2647 })
2648 .collect();
2649
2650 if !entries.is_empty() {
2651 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2652 before_edit,
2653 entries,
2654 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2655 ignore_empty_lines: false,
2656 }));
2657 }
2658 }
2659
2660 self.end_transaction(cx);
2661 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2662 Some(edit_id)
2663 }
2664
2665 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2666 self.was_changed();
2667
2668 if self.edits_since::<usize>(old_version).next().is_none() {
2669 return;
2670 }
2671
2672 self.reparse(cx, true);
2673 cx.emit(BufferEvent::Edited);
2674 if was_dirty != self.is_dirty() {
2675 cx.emit(BufferEvent::DirtyChanged);
2676 }
2677 cx.notify();
2678 }
2679
2680 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2681 where
2682 I: IntoIterator<Item = Range<T>>,
2683 T: ToOffset + Copy,
2684 {
2685 let before_edit = self.snapshot();
2686 let entries = ranges
2687 .into_iter()
2688 .map(|range| AutoindentRequestEntry {
2689 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2690 first_line_is_new: true,
2691 indent_size: before_edit.language_indent_size_at(range.start, cx),
2692 original_indent_column: None,
2693 })
2694 .collect();
2695 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2696 before_edit,
2697 entries,
2698 is_block_mode: false,
2699 ignore_empty_lines: true,
2700 }));
2701 self.request_autoindent(cx);
2702 }
2703
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2706 pub fn insert_empty_line(
2707 &mut self,
2708 position: impl ToPoint,
2709 space_above: bool,
2710 space_below: bool,
2711 cx: &mut Context<Self>,
2712 ) -> Point {
2713 let mut position = position.to_point(self);
2714
2715 self.start_transaction();
2716
2717 self.edit(
2718 [(position..position, "\n")],
2719 Some(AutoindentMode::EachLine),
2720 cx,
2721 );
2722
2723 if position.column > 0 {
2724 position += Point::new(1, 0);
2725 }
2726
2727 if !self.is_line_blank(position.row) {
2728 self.edit(
2729 [(position..position, "\n")],
2730 Some(AutoindentMode::EachLine),
2731 cx,
2732 );
2733 }
2734
2735 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2736 self.edit(
2737 [(position..position, "\n")],
2738 Some(AutoindentMode::EachLine),
2739 cx,
2740 );
2741 position.row += 1;
2742 }
2743
2744 if space_below
2745 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2746 {
2747 self.edit(
2748 [(position..position, "\n")],
2749 Some(AutoindentMode::EachLine),
2750 cx,
2751 );
2752 }
2753
2754 self.end_transaction(cx);
2755
2756 position
2757 }
2758
2759 /// Applies the given remote operations to the buffer.
2760 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2761 self.pending_autoindent.take();
2762 let was_dirty = self.is_dirty();
2763 let old_version = self.version.clone();
2764 let mut deferred_ops = Vec::new();
2765 let buffer_ops = ops
2766 .into_iter()
2767 .filter_map(|op| match op {
2768 Operation::Buffer(op) => Some(op),
2769 _ => {
2770 if self.can_apply_op(&op) {
2771 self.apply_op(op, cx);
2772 } else {
2773 deferred_ops.push(op);
2774 }
2775 None
2776 }
2777 })
2778 .collect::<Vec<_>>();
2779 for operation in buffer_ops.iter() {
2780 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2781 }
2782 self.text.apply_ops(buffer_ops);
2783 self.deferred_ops.insert(deferred_ops);
2784 self.flush_deferred_ops(cx);
2785 self.did_edit(&old_version, was_dirty, cx);
2786 // Notify independently of whether the buffer was edited as the operations could include a
2787 // selection update.
2788 cx.notify();
2789 }
2790
2791 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2792 let mut deferred_ops = Vec::new();
2793 for op in self.deferred_ops.drain().iter().cloned() {
2794 if self.can_apply_op(&op) {
2795 self.apply_op(op, cx);
2796 } else {
2797 deferred_ops.push(op);
2798 }
2799 }
2800 self.deferred_ops.insert(deferred_ops);
2801 }
2802
2803 pub fn has_deferred_ops(&self) -> bool {
2804 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2805 }
2806
2807 fn can_apply_op(&self, operation: &Operation) -> bool {
2808 match operation {
2809 Operation::Buffer(_) => {
2810 unreachable!("buffer operations should never be applied at this layer")
2811 }
2812 Operation::UpdateDiagnostics {
2813 diagnostics: diagnostic_set,
2814 ..
2815 } => diagnostic_set.iter().all(|diagnostic| {
2816 self.text.can_resolve(&diagnostic.range.start)
2817 && self.text.can_resolve(&diagnostic.range.end)
2818 }),
2819 Operation::UpdateSelections { selections, .. } => selections
2820 .iter()
2821 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2822 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2823 }
2824 }
2825
2826 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2827 match operation {
2828 Operation::Buffer(_) => {
2829 unreachable!("buffer operations should never be applied at this layer")
2830 }
2831 Operation::UpdateDiagnostics {
2832 server_id,
2833 diagnostics: diagnostic_set,
2834 lamport_timestamp,
2835 } => {
2836 let snapshot = self.snapshot();
2837 self.apply_diagnostic_update(
2838 server_id,
2839 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2840 lamport_timestamp,
2841 cx,
2842 );
2843 }
2844 Operation::UpdateSelections {
2845 selections,
2846 lamport_timestamp,
2847 line_mode,
2848 cursor_shape,
2849 } => {
2850 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2851 && set.lamport_timestamp > lamport_timestamp
2852 {
2853 return;
2854 }
2855
2856 self.remote_selections.insert(
2857 lamport_timestamp.replica_id,
2858 SelectionSet {
2859 selections,
2860 lamport_timestamp,
2861 line_mode,
2862 cursor_shape,
2863 },
2864 );
2865 self.text.lamport_clock.observe(lamport_timestamp);
2866 self.non_text_state_update_count += 1;
2867 }
2868 Operation::UpdateCompletionTriggers {
2869 triggers,
2870 lamport_timestamp,
2871 server_id,
2872 } => {
2873 if triggers.is_empty() {
2874 self.completion_triggers_per_language_server
2875 .remove(&server_id);
2876 self.completion_triggers = self
2877 .completion_triggers_per_language_server
2878 .values()
2879 .flat_map(|triggers| triggers.iter().cloned())
2880 .collect();
2881 } else {
2882 self.completion_triggers_per_language_server
2883 .insert(server_id, triggers.iter().cloned().collect());
2884 self.completion_triggers.extend(triggers);
2885 }
2886 self.text.lamport_clock.observe(lamport_timestamp);
2887 }
2888 Operation::UpdateLineEnding {
2889 line_ending,
2890 lamport_timestamp,
2891 } => {
2892 self.text.set_line_ending(line_ending);
2893 self.text.lamport_clock.observe(lamport_timestamp);
2894 }
2895 }
2896 }
2897
2898 fn apply_diagnostic_update(
2899 &mut self,
2900 server_id: LanguageServerId,
2901 diagnostics: DiagnosticSet,
2902 lamport_timestamp: clock::Lamport,
2903 cx: &mut Context<Self>,
2904 ) {
2905 if lamport_timestamp > self.diagnostics_timestamp {
2906 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2907 if diagnostics.is_empty() {
2908 if let Ok(ix) = ix {
2909 self.diagnostics.remove(ix);
2910 }
2911 } else {
2912 match ix {
2913 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2914 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2915 };
2916 }
2917 self.diagnostics_timestamp = lamport_timestamp;
2918 self.non_text_state_update_count += 1;
2919 self.text.lamport_clock.observe(lamport_timestamp);
2920 cx.notify();
2921 cx.emit(BufferEvent::DiagnosticsUpdated);
2922 }
2923 }
2924
2925 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2926 self.was_changed();
2927 cx.emit(BufferEvent::Operation {
2928 operation,
2929 is_local,
2930 });
2931 }
2932
2933 /// Removes the selections for a given peer.
2934 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2935 self.remote_selections.remove(&replica_id);
2936 cx.notify();
2937 }
2938
2939 /// Undoes the most recent transaction.
2940 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2941 let was_dirty = self.is_dirty();
2942 let old_version = self.version.clone();
2943
2944 if let Some((transaction_id, operation)) = self.text.undo() {
2945 self.send_operation(Operation::Buffer(operation), true, cx);
2946 self.did_edit(&old_version, was_dirty, cx);
2947 Some(transaction_id)
2948 } else {
2949 None
2950 }
2951 }
2952
2953 /// Manually undoes a specific transaction in the buffer's undo history.
2954 pub fn undo_transaction(
2955 &mut self,
2956 transaction_id: TransactionId,
2957 cx: &mut Context<Self>,
2958 ) -> bool {
2959 let was_dirty = self.is_dirty();
2960 let old_version = self.version.clone();
2961 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2962 self.send_operation(Operation::Buffer(operation), true, cx);
2963 self.did_edit(&old_version, was_dirty, cx);
2964 true
2965 } else {
2966 false
2967 }
2968 }
2969
2970 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2971 pub fn undo_to_transaction(
2972 &mut self,
2973 transaction_id: TransactionId,
2974 cx: &mut Context<Self>,
2975 ) -> bool {
2976 let was_dirty = self.is_dirty();
2977 let old_version = self.version.clone();
2978
2979 let operations = self.text.undo_to_transaction(transaction_id);
2980 let undone = !operations.is_empty();
2981 for operation in operations {
2982 self.send_operation(Operation::Buffer(operation), true, cx);
2983 }
2984 if undone {
2985 self.did_edit(&old_version, was_dirty, cx)
2986 }
2987 undone
2988 }
2989
2990 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2991 let was_dirty = self.is_dirty();
2992 let operation = self.text.undo_operations(counts);
2993 let old_version = self.version.clone();
2994 self.send_operation(Operation::Buffer(operation), true, cx);
2995 self.did_edit(&old_version, was_dirty, cx);
2996 }
2997
    /// Redoes the most recent transaction.
2999 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3000 let was_dirty = self.is_dirty();
3001 let old_version = self.version.clone();
3002
3003 if let Some((transaction_id, operation)) = self.text.redo() {
3004 self.send_operation(Operation::Buffer(operation), true, cx);
3005 self.did_edit(&old_version, was_dirty, cx);
3006 Some(transaction_id)
3007 } else {
3008 None
3009 }
3010 }
3011
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3013 pub fn redo_to_transaction(
3014 &mut self,
3015 transaction_id: TransactionId,
3016 cx: &mut Context<Self>,
3017 ) -> bool {
3018 let was_dirty = self.is_dirty();
3019 let old_version = self.version.clone();
3020
3021 let operations = self.text.redo_to_transaction(transaction_id);
3022 let redone = !operations.is_empty();
3023 for operation in operations {
3024 self.send_operation(Operation::Buffer(operation), true, cx);
3025 }
3026 if redone {
3027 self.did_edit(&old_version, was_dirty, cx)
3028 }
3029 redone
3030 }
3031
    /// Replaces the completion triggers stored for the given language server.
3033 pub fn set_completion_triggers(
3034 &mut self,
3035 server_id: LanguageServerId,
3036 triggers: BTreeSet<String>,
3037 cx: &mut Context<Self>,
3038 ) {
3039 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3040 if triggers.is_empty() {
3041 self.completion_triggers_per_language_server
3042 .remove(&server_id);
3043 self.completion_triggers = self
3044 .completion_triggers_per_language_server
3045 .values()
3046 .flat_map(|triggers| triggers.iter().cloned())
3047 .collect();
3048 } else {
3049 self.completion_triggers_per_language_server
3050 .insert(server_id, triggers.clone());
3051 self.completion_triggers.extend(triggers.iter().cloned());
3052 }
3053 self.send_operation(
3054 Operation::UpdateCompletionTriggers {
3055 triggers: triggers.into_iter().collect(),
3056 lamport_timestamp: self.completion_triggers_timestamp,
3057 server_id,
3058 },
3059 true,
3060 cx,
3061 );
3062 cx.notify();
3063 }
3064
3065 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3067 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3068 &self.completion_triggers
3069 }
3070
3071 /// Call this directly after performing edits to prevent the preview tab
3072 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3073 /// to return false until there are additional edits.
3074 pub fn refresh_preview(&mut self) {
3075 self.preview_version = self.version.clone();
3076 }
3077
3078 /// Whether we should preserve the preview status of a tab containing this buffer.
3079 pub fn preserve_preview(&self) -> bool {
3080 !self.has_edits_since(&self.preview_version)
3081 }
3082}
3083
3084#[doc(hidden)]
3085#[cfg(any(test, feature = "test-support"))]
3086impl Buffer {
3087 pub fn edit_via_marked_text(
3088 &mut self,
3089 marked_string: &str,
3090 autoindent_mode: Option<AutoindentMode>,
3091 cx: &mut Context<Self>,
3092 ) {
3093 let edits = self.edits_for_marked_text(marked_string);
3094 self.edit(edits, autoindent_mode, cx);
3095 }
3096
3097 pub fn set_group_interval(&mut self, group_interval: Duration) {
3098 self.text.set_group_interval(group_interval);
3099 }
3100
3101 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3102 where
3103 T: rand::Rng,
3104 {
3105 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3106 let mut last_end = None;
3107 for _ in 0..old_range_count {
3108 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3109 break;
3110 }
3111
3112 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3113 let mut range = self.random_byte_range(new_start, rng);
3114 if rng.random_bool(0.2) {
3115 mem::swap(&mut range.start, &mut range.end);
3116 }
3117 last_end = Some(range.end);
3118
3119 let new_text_len = rng.random_range(0..10);
3120 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3121 new_text = new_text.to_uppercase();
3122
3123 edits.push((range, new_text));
3124 }
3125 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3126 self.edit(edits, None, cx);
3127 }
3128
3129 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3130 let was_dirty = self.is_dirty();
3131 let old_version = self.version.clone();
3132
3133 let ops = self.text.randomly_undo_redo(rng);
3134 if !ops.is_empty() {
3135 for op in ops {
3136 self.send_operation(Operation::Buffer(op), true, cx);
3137 self.did_edit(&old_version, was_dirty, cx);
3138 }
3139 }
3140 }
3141}
3142
3143impl EventEmitter<BufferEvent> for Buffer {}
3144
3145impl Deref for Buffer {
3146 type Target = TextBuffer;
3147
3148 fn deref(&self) -> &Self::Target {
3149 &self.text
3150 }
3151}
3152
3153impl BufferSnapshot {
    /// Returns the [`IndentSize`] currently used by the line at the given row,
    /// i.e. the whitespace at the start of that line.
3156 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3157 indent_size_for_line(self, row)
3158 }
3159
3160 /// Returns [`IndentSize`] for a given position that respects user settings
3161 /// and language preferences.
3162 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3163 let settings = language_settings(
3164 self.language_at(position).map(|l| l.name()),
3165 self.file(),
3166 cx,
3167 );
3168 if settings.hard_tabs {
3169 IndentSize::tab()
3170 } else {
3171 IndentSize::spaces(settings.tab_size.get())
3172 }
3173 }
3174
3175 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3176 /// is passed in as `single_indent_size`.
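    ///
    /// A rough usage sketch (illustrative only):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```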
3177 pub fn suggested_indents(
3178 &self,
3179 rows: impl Iterator<Item = u32>,
3180 single_indent_size: IndentSize,
3181 ) -> BTreeMap<u32, IndentSize> {
3182 let mut result = BTreeMap::new();
3183
3184 for row_range in contiguous_ranges(rows, 10) {
3185 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3186 Some(suggestions) => suggestions,
3187 _ => break,
3188 };
3189
3190 for (row, suggestion) in row_range.zip(suggestions) {
3191 let indent_size = if let Some(suggestion) = suggestion {
3192 result
3193 .get(&suggestion.basis_row)
3194 .copied()
3195 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3196 .with_delta(suggestion.delta, single_indent_size)
3197 } else {
3198 self.indent_size_for_line(row)
3199 };
3200
3201 result.insert(row, indent_size);
3202 }
3203 }
3204
3205 result
3206 }
3207
3208 fn suggest_autoindents(
3209 &self,
3210 row_range: Range<u32>,
3211 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3212 let config = &self.language.as_ref()?.config;
3213 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3214
3215 #[derive(Debug, Clone)]
3216 struct StartPosition {
3217 start: Point,
3218 suffix: SharedString,
3219 }
3220
3221 // Find the suggested indentation ranges based on the syntax tree.
3222 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3223 let end = Point::new(row_range.end, 0);
3224 let range = (start..end).to_offset(&self.text);
3225 let mut matches = self.syntax.matches_with_options(
3226 range.clone(),
3227 &self.text,
3228 TreeSitterOptions {
3229 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3230 max_start_depth: None,
3231 },
3232 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3233 );
3234 let indent_configs = matches
3235 .grammars()
3236 .iter()
3237 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3238 .collect::<Vec<_>>();
3239
3240 let mut indent_ranges = Vec::<Range<Point>>::new();
3241 let mut start_positions = Vec::<StartPosition>::new();
3242 let mut outdent_positions = Vec::<Point>::new();
3243 while let Some(mat) = matches.peek() {
3244 let mut start: Option<Point> = None;
3245 let mut end: Option<Point> = None;
3246
3247 let config = indent_configs[mat.grammar_index];
3248 for capture in mat.captures {
3249 if capture.index == config.indent_capture_ix {
3250 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3251 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3252 } else if Some(capture.index) == config.start_capture_ix {
3253 start = Some(Point::from_ts_point(capture.node.end_position()));
3254 } else if Some(capture.index) == config.end_capture_ix {
3255 end = Some(Point::from_ts_point(capture.node.start_position()));
3256 } else if Some(capture.index) == config.outdent_capture_ix {
3257 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3258 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3259 start_positions.push(StartPosition {
3260 start: Point::from_ts_point(capture.node.start_position()),
3261 suffix: suffix.clone(),
3262 });
3263 }
3264 }
3265
3266 matches.advance();
3267 if let Some((start, end)) = start.zip(end) {
3268 if start.row == end.row {
3269 continue;
3270 }
3271 let range = start..end;
3272 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3273 Err(ix) => indent_ranges.insert(ix, range),
3274 Ok(ix) => {
3275 let prev_range = &mut indent_ranges[ix];
3276 prev_range.end = prev_range.end.max(range.end);
3277 }
3278 }
3279 }
3280 }
3281
3282 let mut error_ranges = Vec::<Range<Point>>::new();
3283 let mut matches = self
3284 .syntax
3285 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3286 while let Some(mat) = matches.peek() {
3287 let node = mat.captures[0].node;
3288 let start = Point::from_ts_point(node.start_position());
3289 let end = Point::from_ts_point(node.end_position());
3290 let range = start..end;
3291 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3292 Ok(ix) | Err(ix) => ix,
3293 };
3294 let mut end_ix = ix;
3295 while let Some(existing_range) = error_ranges.get(end_ix) {
3296 if existing_range.end < end {
3297 end_ix += 1;
3298 } else {
3299 break;
3300 }
3301 }
3302 error_ranges.splice(ix..end_ix, [range]);
3303 matches.advance();
3304 }
3305
3306 outdent_positions.sort();
3307 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position,
            // and set its end to the outdent position.
3310 if let Some(range_to_truncate) = indent_ranges
3311 .iter_mut()
3312 .rfind(|indent_range| indent_range.contains(&outdent_position))
3313 {
3314 range_to_truncate.end = outdent_position;
3315 }
3316 }
3317
3318 start_positions.sort_by_key(|b| b.start);
3319
        // Find the suggested indentation increases and decreases based on regexes.
3321 let mut regex_outdent_map = HashMap::default();
3322 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3323 let mut start_positions_iter = start_positions.iter().peekable();
3324
3325 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3326 self.for_each_line(
3327 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3328 ..Point::new(row_range.end, 0),
3329 |row, line| {
3330 if config
3331 .decrease_indent_pattern
3332 .as_ref()
3333 .is_some_and(|regex| regex.is_match(line))
3334 {
3335 indent_change_rows.push((row, Ordering::Less));
3336 }
3337 if config
3338 .increase_indent_pattern
3339 .as_ref()
3340 .is_some_and(|regex| regex.is_match(line))
3341 {
3342 indent_change_rows.push((row + 1, Ordering::Greater));
3343 }
3344 while let Some(pos) = start_positions_iter.peek() {
3345 if pos.start.row < row {
3346 let pos = start_positions_iter.next().unwrap();
3347 last_seen_suffix
3348 .entry(pos.suffix.to_string())
3349 .or_default()
3350 .push(pos.start);
3351 } else {
3352 break;
3353 }
3354 }
3355 for rule in &config.decrease_indent_patterns {
3356 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3357 let row_start_column = self.indent_size_for_line(row).len;
3358 let basis_row = rule
3359 .valid_after
3360 .iter()
3361 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3362 .flatten()
3363 .filter(|start_point| start_point.column <= row_start_column)
3364 .max_by_key(|start_point| start_point.row);
3365 if let Some(outdent_to_row) = basis_row {
3366 regex_outdent_map.insert(row, outdent_to_row.row);
3367 }
3368 break;
3369 }
3370 }
3371 },
3372 );
3373
3374 let mut indent_changes = indent_change_rows.into_iter().peekable();
3375 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3376 prev_non_blank_row.unwrap_or(0)
3377 } else {
3378 row_range.start.saturating_sub(1)
3379 };
3380
3381 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3382 Some(row_range.map(move |row| {
3383 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3384
3385 let mut indent_from_prev_row = false;
3386 let mut outdent_from_prev_row = false;
3387 let mut outdent_to_row = u32::MAX;
3388 let mut from_regex = false;
3389
3390 while let Some((indent_row, delta)) = indent_changes.peek() {
3391 match indent_row.cmp(&row) {
3392 Ordering::Equal => match delta {
3393 Ordering::Less => {
3394 from_regex = true;
3395 outdent_from_prev_row = true
3396 }
3397 Ordering::Greater => {
3398 indent_from_prev_row = true;
3399 from_regex = true
3400 }
3401 _ => {}
3402 },
3403
3404 Ordering::Greater => break,
3405 Ordering::Less => {}
3406 }
3407
3408 indent_changes.next();
3409 }
3410
3411 for range in &indent_ranges {
3412 if range.start.row >= row {
3413 break;
3414 }
3415 if range.start.row == prev_row && range.end > row_start {
3416 indent_from_prev_row = true;
3417 }
3418 if range.end > prev_row_start && range.end <= row_start {
3419 outdent_to_row = outdent_to_row.min(range.start.row);
3420 }
3421 }
3422
3423 if let Some(basis_row) = regex_outdent_map.get(&row) {
3424 indent_from_prev_row = false;
3425 outdent_to_row = *basis_row;
3426 from_regex = true;
3427 }
3428
3429 let within_error = error_ranges
3430 .iter()
3431 .any(|e| e.start.row < row && e.end > row_start);
3432
3433 let suggestion = if outdent_to_row == prev_row
3434 || (outdent_from_prev_row && indent_from_prev_row)
3435 {
3436 Some(IndentSuggestion {
3437 basis_row: prev_row,
3438 delta: Ordering::Equal,
3439 within_error: within_error && !from_regex,
3440 })
3441 } else if indent_from_prev_row {
3442 Some(IndentSuggestion {
3443 basis_row: prev_row,
3444 delta: Ordering::Greater,
3445 within_error: within_error && !from_regex,
3446 })
3447 } else if outdent_to_row < prev_row {
3448 Some(IndentSuggestion {
3449 basis_row: outdent_to_row,
3450 delta: Ordering::Equal,
3451 within_error: within_error && !from_regex,
3452 })
3453 } else if outdent_from_prev_row {
3454 Some(IndentSuggestion {
3455 basis_row: prev_row,
3456 delta: Ordering::Less,
3457 within_error: within_error && !from_regex,
3458 })
3459 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3460 {
3461 Some(IndentSuggestion {
3462 basis_row: prev_row,
3463 delta: Ordering::Equal,
3464 within_error: within_error && !from_regex,
3465 })
3466 } else {
3467 None
3468 };
3469
3470 prev_row = row;
3471 prev_row_start = row_start;
3472 suggestion
3473 }))
3474 }
3475
3476 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3477 while row > 0 {
3478 row -= 1;
3479 if !self.is_line_blank(row) {
3480 return Some(row);
3481 }
3482 }
3483 None
3484 }
3485
3486 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3487 let captures = self.syntax.captures(range, &self.text, |grammar| {
3488 grammar
3489 .highlights_config
3490 .as_ref()
3491 .map(|config| &config.query)
3492 });
3493 let highlight_maps = captures
3494 .grammars()
3495 .iter()
3496 .map(|grammar| grammar.highlight_map())
3497 .collect();
3498 (captures, highlight_maps)
3499 }
3500
    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries are
    /// arbitrary, because the text is stored in a [`Rope`](text::Rope), but each returned
    /// chunk has a single syntax highlighting style and diagnostic status.
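    ///
    /// A sketch of typical usage (assuming a `snapshot: &BufferSnapshot` is in scope):
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries its syntax highlight id and diagnostic severity
    ///     // alongside the raw `&str` slice.
    ///     print!("{}", chunk.text);
    /// }
    /// ```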
3505 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3506 let range = range.start.to_offset(self)..range.end.to_offset(self);
3507
3508 let mut syntax = None;
3509 if language_aware {
3510 syntax = Some(self.get_highlights(range.clone()));
3511 }
3512 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3513 let diagnostics = language_aware;
3514 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3515 }
3516
3517 pub fn highlighted_text_for_range<T: ToOffset>(
3518 &self,
3519 range: Range<T>,
3520 override_style: Option<HighlightStyle>,
3521 syntax_theme: &SyntaxTheme,
3522 ) -> HighlightedText {
3523 HighlightedText::from_buffer_range(
3524 range,
3525 &self.text,
3526 &self.syntax,
3527 override_style,
3528 syntax_theme,
3529 )
3530 }
3531
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A single string buffer is reused across lines, so no per-line allocation is needed.
3534 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3535 let mut line = String::new();
3536 let mut row = range.start.row;
3537 for chunk in self
3538 .as_rope()
3539 .chunks_in_range(range.to_offset(self))
3540 .chain(["\n"])
3541 {
3542 for (newline_ix, text) in chunk.split('\n').enumerate() {
3543 if newline_ix > 0 {
3544 callback(row, &line);
3545 row += 1;
3546 line.clear();
3547 }
3548 line.push_str(text);
3549 }
3550 }
3551 }
3552
3553 /// Iterates over every [`SyntaxLayer`] in the buffer.
3554 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3555 self.syntax_layers_for_range(0..self.len(), true)
3556 }
3557
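    /// Returns the last syntax layer yielded for the given position whose content actually
    /// includes it, typically the most deeply nested one.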
3558 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3559 let offset = position.to_offset(self);
3560 self.syntax_layers_for_range(offset..offset, false)
3561 .filter(|l| {
3562 if let Some(ranges) = l.included_sub_ranges {
3563 ranges.iter().any(|range| {
3564 let start = range.start.to_offset(self);
3565 start <= offset && {
3566 let end = range.end.to_offset(self);
3567 offset < end
3568 }
3569 })
3570 } else {
3571 l.node().start_byte() <= offset && l.node().end_byte() > offset
3572 }
3573 })
3574 .last()
3575 }
3576
3577 pub fn syntax_layers_for_range<D: ToOffset>(
3578 &self,
3579 range: Range<D>,
3580 include_hidden: bool,
3581 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3582 self.syntax
3583 .layers_for_range(range, &self.text, include_hidden)
3584 }
3585
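    /// Returns the deepest syntax layer whose range contains the given range, preferring
    /// the layer with the tightest byte range when depths are equal.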
3586 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3587 &self,
3588 range: Range<D>,
3589 ) -> Option<SyntaxLayer<'_>> {
3590 let range = range.to_offset(self);
3591 self.syntax
3592 .layers_for_range(range, &self.text, false)
3593 .max_by(|a, b| {
3594 if a.depth != b.depth {
3595 a.depth.cmp(&b.depth)
3596 } else if a.offset.0 != b.offset.0 {
3597 a.offset.0.cmp(&b.offset.0)
3598 } else {
3599 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3600 }
3601 })
3602 }
3603
3604 /// Returns the main [`Language`].
3605 pub fn language(&self) -> Option<&Arc<Language>> {
3606 self.language.as_ref()
3607 }
3608
3609 /// Returns the [`Language`] at the given location.
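    ///
    /// For example, in a Markdown buffer with an embedded Rust code block, positions inside
    /// the block resolve to the injected Rust language, while other positions fall back to
    /// the buffer's main language. A sketch (the `offset` variable is illustrative):
    ///
    /// ```ignore
    /// let name = snapshot.language_at(offset).map(|language| language.name());
    /// ```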
3610 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3611 self.syntax_layer_at(position)
3612 .map(|info| info.language)
3613 .or(self.language.as_ref())
3614 }
3615
3616 /// Returns the settings for the language at the given location.
3617 pub fn settings_at<'a, D: ToOffset>(
3618 &'a self,
3619 position: D,
3620 cx: &'a App,
3621 ) -> Cow<'a, LanguageSettings> {
3622 language_settings(
3623 self.language_at(position).map(|l| l.name()),
3624 self.file.as_ref(),
3625 cx,
3626 )
3627 }
3628
3629 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3630 CharClassifier::new(self.language_scope_at(point))
3631 }
3632
3633 /// Returns the [`LanguageScope`] at the given location.
3634 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3635 let offset = position.to_offset(self);
3636 let mut scope = None;
3637 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3638
3639 // Use the layer that has the smallest node intersecting the given point.
3640 for layer in self
3641 .syntax
3642 .layers_for_range(offset..offset, &self.text, false)
3643 {
3644 let mut cursor = layer.node().walk();
3645
3646 let mut range = None;
3647 loop {
3648 let child_range = cursor.node().byte_range();
3649 if !child_range.contains(&offset) {
3650 break;
3651 }
3652
3653 range = Some(child_range);
3654 if cursor.goto_first_child_for_byte(offset).is_none() {
3655 break;
3656 }
3657 }
3658
3659 if let Some(range) = range
3660 && smallest_range_and_depth.as_ref().is_none_or(
3661 |(smallest_range, smallest_range_depth)| {
3662 if layer.depth > *smallest_range_depth {
3663 true
3664 } else if layer.depth == *smallest_range_depth {
3665 range.len() < smallest_range.len()
3666 } else {
3667 false
3668 }
3669 },
3670 )
3671 {
3672 smallest_range_and_depth = Some((range, layer.depth));
3673 scope = Some(LanguageScope {
3674 language: layer.language.clone(),
3675 override_id: layer.override_id(offset, &self.text),
3676 });
3677 }
3678 }
3679
3680 scope.or_else(|| {
3681 self.language.clone().map(|language| LanguageScope {
3682 language,
3683 override_id: None,
3684 })
3685 })
3686 }
3687
3688 /// Returns a tuple of the range and character kind of the word
3689 /// surrounding the given position.
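    ///
    /// A sketch of typical usage (the buffer contents and offset are illustrative):
    ///
    /// ```ignore
    /// // With buffer text "let foo_bar = 1;" and an offset inside "foo_bar",
    /// // `range` covers the bytes of "foo_bar" and `kind` is the word's `CharKind`.
    /// let (range, kind) = snapshot.surrounding_word(6usize, None);
    /// ```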
3690 pub fn surrounding_word<T: ToOffset>(
3691 &self,
3692 start: T,
3693 scope_context: Option<CharScopeContext>,
3694 ) -> (Range<usize>, Option<CharKind>) {
3695 let mut start = start.to_offset(self);
3696 let mut end = start;
3697 let mut next_chars = self.chars_at(start).take(128).peekable();
3698 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3699
3700 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3701 let word_kind = cmp::max(
3702 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3703 next_chars.peek().copied().map(|c| classifier.kind(c)),
3704 );
3705
3706 for ch in prev_chars {
3707 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3708 start -= ch.len_utf8();
3709 } else {
3710 break;
3711 }
3712 }
3713
3714 for ch in next_chars {
3715 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3716 end += ch.len_utf8();
3717 } else {
3718 break;
3719 }
3720 }
3721
3722 (start..end, word_kind)
3723 }
3724
3725 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3726 /// range. When `require_larger` is true, the node found must be larger than the query range.
3727 ///
3728 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3729 /// be moved to the root of the tree.
3730 fn goto_node_enclosing_range(
3731 cursor: &mut tree_sitter::TreeCursor,
3732 query_range: &Range<usize>,
3733 require_larger: bool,
3734 ) -> bool {
3735 let mut ascending = false;
3736 loop {
3737 let mut range = cursor.node().byte_range();
3738 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3741 if range.start > query_range.start {
3742 cursor.goto_previous_sibling();
3743 range = cursor.node().byte_range();
3744 }
3745 } else {
3746 // When the query range is non-empty and the current node ends exactly at the start,
3747 // move to the next sibling to find a node that extends beyond the start.
3748 if range.end == query_range.start {
3749 cursor.goto_next_sibling();
3750 range = cursor.node().byte_range();
3751 }
3752 }
3753
3754 let encloses = range.contains_inclusive(query_range)
3755 && (!require_larger || range.len() > query_range.len());
3756 if !encloses {
3757 ascending = true;
3758 if !cursor.goto_parent() {
3759 return false;
3760 }
3761 continue;
3762 } else if ascending {
3763 return true;
3764 }
3765
3766 // Descend into the current node.
3767 if cursor
3768 .goto_first_child_for_byte(query_range.start)
3769 .is_none()
3770 {
3771 return true;
3772 }
3773 }
3774 }
3775
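    /// Returns the smallest syntax node, from any syntax layer, that contains the given
    /// range and is strictly larger than it. For an empty range sitting between two nodes,
    /// a named node starting at the position is preferred over an anonymous token ending
    /// there.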
3776 pub fn syntax_ancestor<'a, T: ToOffset>(
3777 &'a self,
3778 range: Range<T>,
3779 ) -> Option<tree_sitter::Node<'a>> {
3780 let range = range.start.to_offset(self)..range.end.to_offset(self);
3781 let mut result: Option<tree_sitter::Node<'a>> = None;
3782 for layer in self
3783 .syntax
3784 .layers_for_range(range.clone(), &self.text, true)
3785 {
3786 let mut cursor = layer.node().walk();
3787
3788 // Find the node that both contains the range and is larger than it.
3789 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3790 continue;
3791 }
3792
3793 let left_node = cursor.node();
3794 let mut layer_result = left_node;
3795
3796 // For an empty range, try to find another node immediately to the right of the range.
3797 if left_node.end_byte() == range.start {
3798 let mut right_node = None;
3799 while !cursor.goto_next_sibling() {
3800 if !cursor.goto_parent() {
3801 break;
3802 }
3803 }
3804
3805 while cursor.node().start_byte() == range.start {
3806 right_node = Some(cursor.node());
3807 if !cursor.goto_first_child() {
3808 break;
3809 }
3810 }
3811
3812 // If there is a candidate node on both sides of the (empty) range, then
3813 // decide between the two by favoring a named node over an anonymous token.
3814 // If both nodes are the same in that regard, favor the right one.
3815 if let Some(right_node) = right_node
3816 && (right_node.is_named() || !left_node.is_named())
3817 {
3818 layer_result = right_node;
3819 }
3820 }
3821
3822 if let Some(previous_result) = &result
3823 && previous_result.byte_range().len() < layer_result.byte_range().len()
3824 {
3825 continue;
3826 }
3827 result = Some(layer_result);
3828 }
3829
3830 result
3831 }
3832
3833 /// Find the previous sibling syntax node at the given range.
3834 ///
3835 /// This function locates the syntax node that precedes the node containing
3836 /// the given range. It searches hierarchically by:
3837 /// 1. Finding the node that contains the given range
3838 /// 2. Looking for the previous sibling at the same tree level
3839 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3840 ///
3841 /// Returns `None` if there is no previous sibling at any ancestor level.
3842 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3843 &'a self,
3844 range: Range<T>,
3845 ) -> Option<tree_sitter::Node<'a>> {
3846 let range = range.start.to_offset(self)..range.end.to_offset(self);
3847 let mut result: Option<tree_sitter::Node<'a>> = None;
3848
3849 for layer in self
3850 .syntax
3851 .layers_for_range(range.clone(), &self.text, true)
3852 {
3853 let mut cursor = layer.node().walk();
3854
3855 // Find the node that contains the range
3856 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3857 continue;
3858 }
3859
3860 // Look for the previous sibling, moving up ancestor levels if needed
3861 loop {
3862 if cursor.goto_previous_sibling() {
3863 let layer_result = cursor.node();
3864
3865 if let Some(previous_result) = &result {
3866 if previous_result.byte_range().end < layer_result.byte_range().end {
3867 continue;
3868 }
3869 }
3870 result = Some(layer_result);
3871 break;
3872 }
3873
3874 // No sibling found at this level, try moving up to parent
3875 if !cursor.goto_parent() {
3876 break;
3877 }
3878 }
3879 }
3880
3881 result
3882 }
3883
3884 /// Find the next sibling syntax node at the given range.
3885 ///
3886 /// This function locates the syntax node that follows the node containing
3887 /// the given range. It searches hierarchically by:
3888 /// 1. Finding the node that contains the given range
3889 /// 2. Looking for the next sibling at the same tree level
3890 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3891 ///
3892 /// Returns `None` if there is no next sibling at any ancestor level.
3893 pub fn syntax_next_sibling<'a, T: ToOffset>(
3894 &'a self,
3895 range: Range<T>,
3896 ) -> Option<tree_sitter::Node<'a>> {
3897 let range = range.start.to_offset(self)..range.end.to_offset(self);
3898 let mut result: Option<tree_sitter::Node<'a>> = None;
3899
3900 for layer in self
3901 .syntax
3902 .layers_for_range(range.clone(), &self.text, true)
3903 {
3904 let mut cursor = layer.node().walk();
3905
3906 // Find the node that contains the range
3907 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3908 continue;
3909 }
3910
3911 // Look for the next sibling, moving up ancestor levels if needed
3912 loop {
3913 if cursor.goto_next_sibling() {
3914 let layer_result = cursor.node();
3915
3916 if let Some(previous_result) = &result {
3917 if previous_result.byte_range().start > layer_result.byte_range().start {
3918 continue;
3919 }
3920 }
3921 result = Some(layer_result);
3922 break;
3923 }
3924
3925 // No sibling found at this level, try moving up to parent
3926 if !cursor.goto_parent() {
3927 break;
3928 }
3929 }
3930 }
3931
3932 result
3933 }
3934
3935 /// Returns the root syntax node within the given row
3936 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3937 let start_offset = position.to_offset(self);
3938
3939 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3940
3941 let layer = self
3942 .syntax
3943 .layers_for_range(start_offset..start_offset, &self.text, true)
3944 .next()?;
3945
3946 let mut cursor = layer.node().walk();
3947
3948 // Descend to the first leaf that touches the start of the range.
3949 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3950 if cursor.node().end_byte() == start_offset {
3951 cursor.goto_next_sibling();
3952 }
3953 }
3954
3955 // Ascend to the root node within the same row.
3956 while cursor.goto_parent() {
3957 if cursor.node().start_position().row != row {
3958 break;
3959 }
3960 }
3961
3962 Some(cursor.node())
3963 }
3964
3965 /// Returns the outline for the buffer.
3966 ///
3967 /// This method allows passing an optional [`SyntaxTheme`] to
3968 /// syntax-highlight the returned symbols.
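    ///
    /// A sketch of typical usage (no highlighting; `outline_items_containing` is the
    /// underlying call, used directly here for illustration):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```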
3969 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3970 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3971 }
3972
3973 /// Returns all the symbols that contain the given position.
3974 ///
3975 /// This method allows passing an optional [`SyntaxTheme`] to
3976 /// syntax-highlight the returned symbols.
3977 pub fn symbols_containing<T: ToOffset>(
3978 &self,
3979 position: T,
3980 theme: Option<&SyntaxTheme>,
3981 ) -> Vec<OutlineItem<Anchor>> {
3982 let position = position.to_offset(self);
3983 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3984 let end = self.clip_offset(position + 1, Bias::Right);
3985 let mut items = self.outline_items_containing(start..end, false, theme);
3986 let mut prev_depth = None;
3987 items.retain(|item| {
3988 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3989 prev_depth = Some(item.depth);
3990 result
3991 });
3992 items
3993 }
3994
3995 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3996 let range = range.to_offset(self);
3997 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3998 grammar.outline_config.as_ref().map(|c| &c.query)
3999 });
4000 let configs = matches
4001 .grammars()
4002 .iter()
4003 .map(|g| g.outline_config.as_ref().unwrap())
4004 .collect::<Vec<_>>();
4005
4006 while let Some(mat) = matches.peek() {
4007 let config = &configs[mat.grammar_index];
4008 let containing_item_node = maybe!({
4009 let item_node = mat.captures.iter().find_map(|cap| {
4010 if cap.index == config.item_capture_ix {
4011 Some(cap.node)
4012 } else {
4013 None
4014 }
4015 })?;
4016
4017 let item_byte_range = item_node.byte_range();
4018 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4019 None
4020 } else {
4021 Some(item_node)
4022 }
4023 });
4024
4025 if let Some(item_node) = containing_item_node {
4026 return Some(
4027 Point::from_ts_point(item_node.start_position())
4028 ..Point::from_ts_point(item_node.end_position()),
4029 );
4030 }
4031
4032 matches.advance();
4033 }
4034 None
4035 }
4036
4037 pub fn outline_items_containing<T: ToOffset>(
4038 &self,
4039 range: Range<T>,
4040 include_extra_context: bool,
4041 theme: Option<&SyntaxTheme>,
4042 ) -> Vec<OutlineItem<Anchor>> {
4043 self.outline_items_containing_internal(
4044 range,
4045 include_extra_context,
4046 theme,
4047 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4048 )
4049 }
4050
4051 pub fn outline_items_as_points_containing<T: ToOffset>(
4052 &self,
4053 range: Range<T>,
4054 include_extra_context: bool,
4055 theme: Option<&SyntaxTheme>,
4056 ) -> Vec<OutlineItem<Point>> {
4057 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4058 range
4059 })
4060 }
4061
4062 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4063 &self,
4064 range: Range<T>,
4065 include_extra_context: bool,
4066 theme: Option<&SyntaxTheme>,
4067 ) -> Vec<OutlineItem<usize>> {
4068 self.outline_items_containing_internal(
4069 range,
4070 include_extra_context,
4071 theme,
4072 |buffer, range| range.to_offset(buffer),
4073 )
4074 }
4075
4076 fn outline_items_containing_internal<T: ToOffset, U>(
4077 &self,
4078 range: Range<T>,
4079 include_extra_context: bool,
4080 theme: Option<&SyntaxTheme>,
4081 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4082 ) -> Vec<OutlineItem<U>> {
4083 let range = range.to_offset(self);
4084 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4085 grammar.outline_config.as_ref().map(|c| &c.query)
4086 });
4087
4088 let mut items = Vec::new();
4089 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4090 while let Some(mat) = matches.peek() {
4091 let config = matches.grammars()[mat.grammar_index]
4092 .outline_config
4093 .as_ref()
4094 .unwrap();
4095 if let Some(item) =
4096 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4097 {
4098 items.push(item);
4099 } else if let Some(capture) = mat
4100 .captures
4101 .iter()
4102 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4103 {
4104 let capture_range = capture.node.start_position()..capture.node.end_position();
4105 let mut capture_row_range =
4106 capture_range.start.row as u32..capture_range.end.row as u32;
4107 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4108 {
4109 capture_row_range.end -= 1;
4110 }
4111 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4112 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4113 last_row_range.end = capture_row_range.end;
4114 } else {
4115 annotation_row_ranges.push(capture_row_range);
4116 }
4117 } else {
4118 annotation_row_ranges.push(capture_row_range);
4119 }
4120 }
4121 matches.advance();
4122 }
4123
4124 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4125
        // Assign depths based on containment relationships and convert the ranges via `range_callback`.
4127 let mut item_ends_stack = Vec::<Point>::new();
4128 let mut anchor_items = Vec::new();
4129 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4130 for item in items {
4131 while let Some(last_end) = item_ends_stack.last().copied() {
4132 if last_end < item.range.end {
4133 item_ends_stack.pop();
4134 } else {
4135 break;
4136 }
4137 }
4138
4139 let mut annotation_row_range = None;
4140 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4141 let row_preceding_item = item.range.start.row.saturating_sub(1);
4142 if next_annotation_row_range.end < row_preceding_item {
4143 annotation_row_ranges.next();
4144 } else {
4145 if next_annotation_row_range.end == row_preceding_item {
4146 annotation_row_range = Some(next_annotation_row_range.clone());
4147 annotation_row_ranges.next();
4148 }
4149 break;
4150 }
4151 }
4152
4153 anchor_items.push(OutlineItem {
4154 depth: item_ends_stack.len(),
4155 range: range_callback(self, item.range.clone()),
4156 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4157 text: item.text,
4158 highlight_ranges: item.highlight_ranges,
4159 name_ranges: item.name_ranges,
4160 body_range: item.body_range.map(|r| range_callback(self, r)),
4161 annotation_range: annotation_row_range.map(|annotation_range| {
4162 let point_range = Point::new(annotation_range.start, 0)
4163 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4164 range_callback(self, point_range)
4165 }),
4166 });
4167 item_ends_stack.push(item.range.end);
4168 }
4169
4170 anchor_items
4171 }
4172
4173 fn next_outline_item(
4174 &self,
4175 config: &OutlineConfig,
4176 mat: &SyntaxMapMatch,
4177 range: &Range<usize>,
4178 include_extra_context: bool,
4179 theme: Option<&SyntaxTheme>,
4180 ) -> Option<OutlineItem<Point>> {
4181 let item_node = mat.captures.iter().find_map(|cap| {
4182 if cap.index == config.item_capture_ix {
4183 Some(cap.node)
4184 } else {
4185 None
4186 }
4187 })?;
4188
4189 let item_byte_range = item_node.byte_range();
4190 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4191 return None;
4192 }
4193 let item_point_range = Point::from_ts_point(item_node.start_position())
4194 ..Point::from_ts_point(item_node.end_position());
4195
4196 let mut open_point = None;
4197 let mut close_point = None;
4198
4199 let mut buffer_ranges = Vec::new();
4200 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4201 let mut range = node.start_byte()..node.end_byte();
4202 let start = node.start_position();
4203 if node.end_position().row > start.row {
4204 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4205 }
4206
4207 if !range.is_empty() {
4208 buffer_ranges.push((range, node_is_name));
4209 }
4210 };
4211
4212 for capture in mat.captures {
4213 if capture.index == config.name_capture_ix {
4214 add_to_buffer_ranges(capture.node, true);
4215 } else if Some(capture.index) == config.context_capture_ix
4216 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4217 {
4218 add_to_buffer_ranges(capture.node, false);
4219 } else {
4220 if Some(capture.index) == config.open_capture_ix {
4221 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4222 } else if Some(capture.index) == config.close_capture_ix {
4223 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4224 }
4225 }
4226 }
4227
4228 if buffer_ranges.is_empty() {
4229 return None;
4230 }
4231 let source_range_for_text =
4232 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4233
4234 let mut text = String::new();
4235 let mut highlight_ranges = Vec::new();
4236 let mut name_ranges = Vec::new();
4237 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4238 let mut last_buffer_range_end = 0;
4239 for (buffer_range, is_name) in buffer_ranges {
4240 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4241 if space_added {
4242 text.push(' ');
4243 }
4244 let before_append_len = text.len();
4245 let mut offset = buffer_range.start;
4246 chunks.seek(buffer_range.clone());
4247 for mut chunk in chunks.by_ref() {
4248 if chunk.text.len() > buffer_range.end - offset {
4249 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4250 offset = buffer_range.end;
4251 } else {
4252 offset += chunk.text.len();
4253 }
4254 let style = chunk
4255 .syntax_highlight_id
4256 .zip(theme)
4257 .and_then(|(highlight, theme)| highlight.style(theme));
4258 if let Some(style) = style {
4259 let start = text.len();
4260 let end = start + chunk.text.len();
4261 highlight_ranges.push((start..end, style));
4262 }
4263 text.push_str(chunk.text);
4264 if offset >= buffer_range.end {
4265 break;
4266 }
4267 }
4268 if is_name {
4269 let after_append_len = text.len();
4270 let start = if space_added && !name_ranges.is_empty() {
4271 before_append_len - 1
4272 } else {
4273 before_append_len
4274 };
4275 name_ranges.push(start..after_append_len);
4276 }
4277 last_buffer_range_end = buffer_range.end;
4278 }
4279
4280 Some(OutlineItem {
4281 depth: 0, // We'll calculate the depth later
4282 range: item_point_range,
4283 source_range_for_text: source_range_for_text.to_point(self),
4284 text,
4285 highlight_ranges,
4286 name_ranges,
4287 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4288 annotation_range: None,
4289 })
4290 }
4291
4292 pub fn function_body_fold_ranges<T: ToOffset>(
4293 &self,
4294 within: Range<T>,
4295 ) -> impl Iterator<Item = Range<usize>> + '_ {
4296 self.text_object_ranges(within, TreeSitterOptions::default())
4297 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4298 }
4299
4300 /// For each grammar in the language, runs the provided
4301 /// [`tree_sitter::Query`] against the given range.
4302 pub fn matches(
4303 &self,
4304 range: Range<usize>,
4305 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4306 ) -> SyntaxMapMatches<'_> {
4307 self.syntax.matches(range, self, query)
4308 }
4309
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect those chunks.
    /// As a result, this may return more bracket pairs than the range itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped, and the resulting bracket match collections are not ordered.
4315 pub fn fetch_bracket_ranges(
4316 &self,
4317 range: Range<usize>,
4318 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4319 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4320 let mut all_bracket_matches = HashMap::default();
4321
4322 for chunk in self
4323 .tree_sitter_data
4324 .chunks
4325 .applicable_chunks(&[range.to_point(self)])
4326 {
4327 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4328 continue;
4329 }
4330 let chunk_range = chunk.anchor_range();
4331 let chunk_range = chunk_range.to_offset(&self);
4332
4333 if let Some(cached_brackets) =
4334 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4335 {
4336 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4337 continue;
4338 }
4339
4340 let mut all_brackets = Vec::new();
4341 let mut opens = Vec::new();
4342 let mut color_pairs = Vec::new();
4343
4344 let mut matches = self.syntax.matches_with_options(
4345 chunk_range.clone(),
4346 &self.text,
4347 TreeSitterOptions {
4348 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4349 max_start_depth: None,
4350 },
4351 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4352 );
4353 let configs = matches
4354 .grammars()
4355 .iter()
4356 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4357 .collect::<Vec<_>>();
4358
4359 while let Some(mat) = matches.peek() {
4360 let mut open = None;
4361 let mut close = None;
4362 let syntax_layer_depth = mat.depth;
4363 let config = configs[mat.grammar_index];
4364 let pattern = &config.patterns[mat.pattern_index];
4365 for capture in mat.captures {
4366 if capture.index == config.open_capture_ix {
4367 open = Some(capture.node.byte_range());
4368 } else if capture.index == config.close_capture_ix {
4369 close = Some(capture.node.byte_range());
4370 }
4371 }
4372
4373 matches.advance();
4374
4375 let Some((open_range, close_range)) = open.zip(close) else {
4376 continue;
4377 };
4378
4379 let bracket_range = open_range.start..=close_range.end;
4380 if !bracket_range.overlaps(&chunk_range) {
4381 continue;
4382 }
4383
4384 let index = all_brackets.len();
4385 all_brackets.push(BracketMatch {
4386 open_range: open_range.clone(),
4387 close_range: close_range.clone(),
4388 newline_only: pattern.newline_only,
4389 syntax_layer_depth,
4390 color_index: None,
4391 });
4392
                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
                // a pair will match the entire tag along with all of the text inside.
                // For now, avoid highlighting any pair whose brackets are both longer than a single character.
                // We still need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4397 let should_color =
4398 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4399 if should_color {
4400 opens.push(open_range.clone());
4401 color_pairs.push((open_range, close_range, index));
4402 }
4403 }
4404
4405 opens.sort_by_key(|r| (r.start, r.end));
4406 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4407 color_pairs.sort_by_key(|(_, close, _)| close.end);
4408
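            // Assign rainbow color indices with a stack: opens are pushed in positional
            // order and pairs are visited in order of their closing bracket, so when a
            // pair's open bracket is on top of the stack, the stack depth is that pair's
            // nesting depth.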
4409 let mut open_stack = Vec::new();
4410 let mut open_index = 0;
4411 for (open, close, index) in color_pairs {
4412 while open_index < opens.len() && opens[open_index].start < close.start {
4413 open_stack.push(opens[open_index].clone());
4414 open_index += 1;
4415 }
4416
4417 if open_stack.last() == Some(&open) {
4418 let depth_index = open_stack.len() - 1;
4419 all_brackets[index].color_index = Some(depth_index);
4420 open_stack.pop();
4421 }
4422 }
4423
4424 all_brackets.sort_by_key(|bracket_match| {
4425 (bracket_match.open_range.start, bracket_match.open_range.end)
4426 });
4427
4428 if let empty_slot @ None =
4429 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4430 {
4431 *empty_slot = Some(all_brackets.clone());
4432 }
4433 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4434 }
4435
4436 all_bracket_matches
4437 }
4438
4439 pub fn all_bracket_ranges(
4440 &self,
4441 range: Range<usize>,
4442 ) -> impl Iterator<Item = BracketMatch<usize>> {
4443 self.fetch_bracket_ranges(range.clone(), None)
4444 .into_values()
4445 .flatten()
4446 .filter(move |bracket_match| {
4447 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4448 bracket_range.overlaps(&range)
4449 })
4450 }
4451
4452 /// Returns bracket range pairs overlapping or adjacent to `range`
4453 pub fn bracket_ranges<T: ToOffset>(
4454 &self,
4455 range: Range<T>,
4456 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4457 // Find bracket pairs that *inclusively* contain the given range.
4458 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4459 self.all_bracket_ranges(range)
4460 .filter(|pair| !pair.newline_only)
4461 }
4462
4463 pub fn debug_variables_query<T: ToOffset>(
4464 &self,
4465 range: Range<T>,
4466 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4467 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4468
4469 let mut matches = self.syntax.matches_with_options(
4470 range.clone(),
4471 &self.text,
4472 TreeSitterOptions::default(),
4473 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4474 );
4475
4476 let configs = matches
4477 .grammars()
4478 .iter()
4479 .map(|grammar| grammar.debug_variables_config.as_ref())
4480 .collect::<Vec<_>>();
4481
4482 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4483
4484 iter::from_fn(move || {
4485 loop {
4486 while let Some(capture) = captures.pop() {
4487 if capture.0.overlaps(&range) {
4488 return Some(capture);
4489 }
4490 }
4491
4492 let mat = matches.peek()?;
4493
4494 let Some(config) = configs[mat.grammar_index].as_ref() else {
4495 matches.advance();
4496 continue;
4497 };
4498
4499 for capture in mat.captures {
4500 let Some(ix) = config
4501 .objects_by_capture_ix
4502 .binary_search_by_key(&capture.index, |e| e.0)
4503 .ok()
4504 else {
4505 continue;
4506 };
4507 let text_object = config.objects_by_capture_ix[ix].1;
4508 let byte_range = capture.node.byte_range();
4509
4510 let mut found = false;
4511 for (range, existing) in captures.iter_mut() {
4512 if existing == &text_object {
4513 range.start = range.start.min(byte_range.start);
4514 range.end = range.end.max(byte_range.end);
4515 found = true;
4516 break;
4517 }
4518 }
4519
4520 if !found {
4521 captures.push((byte_range, text_object));
4522 }
4523 }
4524
4525 matches.advance();
4526 }
4527 })
4528 }
4529
4530 pub fn text_object_ranges<T: ToOffset>(
4531 &self,
4532 range: Range<T>,
4533 options: TreeSitterOptions,
4534 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4535 let range =
4536 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4537
4538 let mut matches =
4539 self.syntax
4540 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4541 grammar.text_object_config.as_ref().map(|c| &c.query)
4542 });
4543
4544 let configs = matches
4545 .grammars()
4546 .iter()
4547 .map(|grammar| grammar.text_object_config.as_ref())
4548 .collect::<Vec<_>>();
4549
4550 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4551
4552 iter::from_fn(move || {
4553 loop {
4554 while let Some(capture) = captures.pop() {
4555 if capture.0.overlaps(&range) {
4556 return Some(capture);
4557 }
4558 }
4559
4560 let mat = matches.peek()?;
4561
4562 let Some(config) = configs[mat.grammar_index].as_ref() else {
4563 matches.advance();
4564 continue;
4565 };
4566
4567 for capture in mat.captures {
4568 let Some(ix) = config
4569 .text_objects_by_capture_ix
4570 .binary_search_by_key(&capture.index, |e| e.0)
4571 .ok()
4572 else {
4573 continue;
4574 };
4575 let text_object = config.text_objects_by_capture_ix[ix].1;
4576 let byte_range = capture.node.byte_range();
4577
4578 let mut found = false;
4579 for (range, existing) in captures.iter_mut() {
4580 if existing == &text_object {
4581 range.start = range.start.min(byte_range.start);
4582 range.end = range.end.max(byte_range.end);
4583 found = true;
4584 break;
4585 }
4586 }
4587
4588 if !found {
4589 captures.push((byte_range, text_object));
4590 }
4591 }
4592
4593 matches.advance();
4594 }
4595 })
4596 }
4597
4598 /// Returns enclosing bracket ranges containing the given range
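    ///
    /// A sketch of typical usage (assuming the buffer's language defines bracket pairs; the
    /// `cursor_offset` variable is illustrative):
    ///
    /// ```ignore
    /// // With text "fn main() { foo(); }" and a cursor between the braces, this yields
    /// // the byte ranges of the `{` and `}` pair enclosing the cursor.
    /// for pair in snapshot.enclosing_bracket_ranges(cursor_offset..cursor_offset) {
    ///     dbg!(&pair.open_range, &pair.close_range);
    /// }
    /// ```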
4599 pub fn enclosing_bracket_ranges<T: ToOffset>(
4600 &self,
4601 range: Range<T>,
4602 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4603 let range = range.start.to_offset(self)..range.end.to_offset(self);
4604
4605 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4606 let max_depth = result
4607 .iter()
4608 .map(|mat| mat.syntax_layer_depth)
4609 .max()
4610 .unwrap_or(0);
4611 result.into_iter().filter(move |pair| {
4612 pair.open_range.start <= range.start
4613 && pair.close_range.end >= range.end
4614 && pair.syntax_layer_depth == max_depth
4615 })
4616 }
4617
    /// Returns the smallest pair of enclosing bracket ranges containing the given range, or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4621 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4622 &self,
4623 range: Range<T>,
4624 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4625 ) -> Option<(Range<usize>, Range<usize>)> {
4626 let range = range.start.to_offset(self)..range.end.to_offset(self);
4627
4628 // Get the ranges of the innermost pair of brackets.
4629 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4630
4631 for pair in self.enclosing_bracket_ranges(range) {
4632 if let Some(range_filter) = range_filter
4633 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4634 {
4635 continue;
4636 }
4637
4638 let len = pair.close_range.end - pair.open_range.start;
4639
4640 if let Some((existing_open, existing_close)) = &result {
4641 let existing_len = existing_close.end - existing_open.start;
4642 if len > existing_len {
4643 continue;
4644 }
4645 }
4646
4647 result = Some((pair.open_range, pair.close_range));
4648 }
4649
4650 result
4651 }
4652
4653 /// Returns anchor ranges for any matches of the redaction query.
4654 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4655 /// will be run on the relevant section of the buffer.
4656 pub fn redacted_ranges<T: ToOffset>(
4657 &self,
4658 range: Range<T>,
4659 ) -> impl Iterator<Item = Range<usize>> + '_ {
4660 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4661 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4662 grammar
4663 .redactions_config
4664 .as_ref()
4665 .map(|config| &config.query)
4666 });
4667
4668 let configs = syntax_matches
4669 .grammars()
4670 .iter()
4671 .map(|grammar| grammar.redactions_config.as_ref())
4672 .collect::<Vec<_>>();
4673
4674 iter::from_fn(move || {
4675 let redacted_range = syntax_matches
4676 .peek()
4677 .and_then(|mat| {
4678 configs[mat.grammar_index].and_then(|config| {
4679 mat.captures
4680 .iter()
4681 .find(|capture| capture.index == config.redaction_capture_ix)
4682 })
4683 })
4684 .map(|mat| mat.node.byte_range());
4685 syntax_matches.advance();
4686 redacted_range
4687 })
4688 }
4689
4690 pub fn injections_intersecting_range<T: ToOffset>(
4691 &self,
4692 range: Range<T>,
4693 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4694 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4695
4696 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4697 grammar
4698 .injection_config
4699 .as_ref()
4700 .map(|config| &config.query)
4701 });
4702
4703 let configs = syntax_matches
4704 .grammars()
4705 .iter()
4706 .map(|grammar| grammar.injection_config.as_ref())
4707 .collect::<Vec<_>>();
4708
4709 iter::from_fn(move || {
4710 let ranges = syntax_matches.peek().and_then(|mat| {
4711 let config = &configs[mat.grammar_index]?;
4712 let content_capture_range = mat.captures.iter().find_map(|capture| {
4713 if capture.index == config.content_capture_ix {
4714 Some(capture.node.byte_range())
4715 } else {
4716 None
4717 }
4718 })?;
4719 let language = self.language_at(content_capture_range.start)?;
4720 Some((content_capture_range, language))
4721 });
4722 syntax_matches.advance();
4723 ranges
4724 })
4725 }
4726
4727 pub fn runnable_ranges(
4728 &self,
4729 offset_range: Range<usize>,
4730 ) -> impl Iterator<Item = RunnableRange> + '_ {
4731 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4732 grammar.runnable_config.as_ref().map(|config| &config.query)
4733 });
4734
4735 let test_configs = syntax_matches
4736 .grammars()
4737 .iter()
4738 .map(|grammar| grammar.runnable_config.as_ref())
4739 .collect::<Vec<_>>();
4740
4741 iter::from_fn(move || {
4742 loop {
4743 let mat = syntax_matches.peek()?;
4744
4745 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4746 let mut run_range = None;
4747 let full_range = mat.captures.iter().fold(
4748 Range {
4749 start: usize::MAX,
4750 end: 0,
4751 },
4752 |mut acc, next| {
4753 let byte_range = next.node.byte_range();
4754 if acc.start > byte_range.start {
4755 acc.start = byte_range.start;
4756 }
4757 if acc.end < byte_range.end {
4758 acc.end = byte_range.end;
4759 }
4760 acc
4761 },
4762 );
4763 if full_range.start > full_range.end {
4764 // We did not find a full spanning range of this match.
4765 return None;
4766 }
4767 let extra_captures: SmallVec<[_; 1]> =
4768 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4769 test_configs
4770 .extra_captures
4771 .get(capture.index as usize)
4772 .cloned()
4773 .and_then(|tag_name| match tag_name {
4774 RunnableCapture::Named(name) => {
4775 Some((capture.node.byte_range(), name))
4776 }
4777 RunnableCapture::Run => {
4778 let _ = run_range.insert(capture.node.byte_range());
4779 None
4780 }
4781 })
4782 }));
4783 let run_range = run_range?;
4784 let tags = test_configs
4785 .query
4786 .property_settings(mat.pattern_index)
4787 .iter()
4788 .filter_map(|property| {
4789 if *property.key == *"tag" {
4790 property
4791 .value
4792 .as_ref()
4793 .map(|value| RunnableTag(value.to_string().into()))
4794 } else {
4795 None
4796 }
4797 })
4798 .collect();
4799 let extra_captures = extra_captures
4800 .into_iter()
4801 .map(|(range, name)| {
4802 (
4803 name.to_string(),
4804 self.text_for_range(range).collect::<String>(),
4805 )
4806 })
4807 .collect();
4808 // All tags should have the same range.
4809 Some(RunnableRange {
4810 run_range,
4811 full_range,
4812 runnable: Runnable {
4813 tags,
4814 language: mat.language,
4815 buffer: self.remote_id(),
4816 },
4817 extra_captures,
4818 buffer_id: self.remote_id(),
4819 })
4820 });
4821
4822 syntax_matches.advance();
4823 if test_range.is_some() {
4824 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4825 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4826 return test_range;
4827 }
4828 }
4829 })
4830 }
4831
    /// Returns the selections intersecting the given range for each replica, optionally including the local replica's own selections.
4833 #[allow(clippy::type_complexity)]
4834 pub fn selections_in_range(
4835 &self,
4836 range: Range<Anchor>,
4837 include_local: bool,
4838 ) -> impl Iterator<
4839 Item = (
4840 ReplicaId,
4841 bool,
4842 CursorShape,
4843 impl Iterator<Item = &Selection<Anchor>> + '_,
4844 ),
4845 > + '_ {
4846 self.remote_selections
4847 .iter()
4848 .filter(move |(replica_id, set)| {
4849 (include_local || **replica_id != self.text.replica_id())
4850 && !set.selections.is_empty()
4851 })
4852 .map(move |(replica_id, set)| {
4853 let start_ix = match set.selections.binary_search_by(|probe| {
4854 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4855 }) {
4856 Ok(ix) | Err(ix) => ix,
4857 };
4858 let end_ix = match set.selections.binary_search_by(|probe| {
4859 probe.start.cmp(&range.end, self).then(Ordering::Less)
4860 }) {
4861 Ok(ix) | Err(ix) => ix,
4862 };
4863
4864 (
4865 *replica_id,
4866 set.line_mode,
4867 set.cursor_shape,
4868 set.selections[start_ix..end_ix].iter(),
4869 )
4870 })
4871 }
4872
    /// Returns whether the buffer contains any diagnostics.
4874 pub fn has_diagnostics(&self) -> bool {
4875 !self.diagnostics.is_empty()
4876 }
4877
4878 /// Returns all the diagnostics intersecting the given range.
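    ///
    /// A sketch of typical usage, resolving the anchored ranges to plain byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     eprintln!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```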
4879 pub fn diagnostics_in_range<'a, T, O>(
4880 &'a self,
4881 search_range: Range<T>,
4882 reversed: bool,
4883 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4884 where
4885 T: 'a + Clone + ToOffset,
4886 O: 'a + FromAnchor,
4887 {
4888 let mut iterators: Vec<_> = self
4889 .diagnostics
4890 .iter()
4891 .map(|(_, collection)| {
4892 collection
4893 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4894 .peekable()
4895 })
4896 .collect();
4897
4898 std::iter::from_fn(move || {
4899 let (next_ix, _) = iterators
4900 .iter_mut()
4901 .enumerate()
4902 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4903 .min_by(|(_, a), (_, b)| {
4904 let cmp = a
4905 .range
4906 .start
4907 .cmp(&b.range.start, self)
4908 // when range is equal, sort by diagnostic severity
4909 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4910 // and stabilize order with group_id
4911 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4912 if reversed { cmp.reverse() } else { cmp }
4913 })?;
4914 iterators[next_ix]
4915 .next()
4916 .map(
4917 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4918 diagnostic,
4919 range: FromAnchor::from_anchor(&range.start, self)
4920 ..FromAnchor::from_anchor(&range.end, self),
4921 },
4922 )
4923 })
4924 }
4925
4926 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4927 /// should be used instead.
4928 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4929 &self.diagnostics
4930 }
4931
4932 /// Returns all the diagnostic groups associated with the given
4933 /// language server ID. If no language server ID is provided,
4934 /// all diagnostics groups are returned.
4935 pub fn diagnostic_groups(
4936 &self,
4937 language_server_id: Option<LanguageServerId>,
4938 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4939 let mut groups = Vec::new();
4940
4941 if let Some(language_server_id) = language_server_id {
4942 if let Ok(ix) = self
4943 .diagnostics
4944 .binary_search_by_key(&language_server_id, |e| e.0)
4945 {
4946 self.diagnostics[ix]
4947 .1
4948 .groups(language_server_id, &mut groups, self);
4949 }
4950 } else {
4951 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4952 diagnostics.groups(*language_server_id, &mut groups, self);
4953 }
4954 }
4955
4956 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4957 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4958 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4959 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4960 });
4961
4962 groups
4963 }
4964
4965 /// Returns an iterator over the diagnostics for the given group.
4966 pub fn diagnostic_group<O>(
4967 &self,
4968 group_id: usize,
4969 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4970 where
4971 O: FromAnchor + 'static,
4972 {
4973 self.diagnostics
4974 .iter()
4975 .flat_map(move |(_, set)| set.group(group_id, self))
4976 }
4977
4978 /// An integer version number that accounts for all updates besides
4979 /// the buffer's text itself (which is versioned via a version vector).
4980 pub fn non_text_state_update_count(&self) -> usize {
4981 self.non_text_state_update_count
4982 }
4983
4984 /// An integer version that changes when the buffer's syntax changes.
4985 pub fn syntax_update_count(&self) -> usize {
4986 self.syntax.update_count()
4987 }
4988
4989 /// Returns a snapshot of underlying file.
4990 pub fn file(&self) -> Option<&Arc<dyn File>> {
4991 self.file.as_ref()
4992 }
4993
4994 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4995 if let Some(file) = self.file() {
4996 if file.path().file_name().is_none() || include_root {
4997 Some(file.full_path(cx).to_string_lossy().into_owned())
4998 } else {
4999 Some(file.path().display(file.path_style(cx)).to_string())
5000 }
5001 } else {
5002 None
5003 }
5004 }
5005
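    /// Collects the distinct words in the given offset range, keyed by their text and mapped
    /// to an anchor range covering one occurrence of each word.
    ///
    /// A sketch of typical usage (a minimal query over the whole buffer):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: None,
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```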
5006 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5007 let query_str = query.fuzzy_contents;
5008 if query_str.is_some_and(|query| query.is_empty()) {
5009 return BTreeMap::default();
5010 }
5011
5012 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5013 language,
5014 override_id: None,
5015 }));
5016
5017 let mut query_ix = 0;
5018 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5019 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5020
5021 let mut words = BTreeMap::default();
5022 let mut current_word_start_ix = None;
5023 let mut chunk_ix = query.range.start;
5024 for chunk in self.chunks(query.range, false) {
5025 for (i, c) in chunk.text.char_indices() {
5026 let ix = chunk_ix + i;
5027 if classifier.is_word(c) {
5028 if current_word_start_ix.is_none() {
5029 current_word_start_ix = Some(ix);
5030 }
5031
5032 if let Some(query_chars) = &query_chars
5033 && query_ix < query_len
5034 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5035 {
5036 query_ix += 1;
5037 }
5038 continue;
5039 } else if let Some(word_start) = current_word_start_ix.take()
5040 && query_ix == query_len
5041 {
5042 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5043 let mut word_text = self.text_for_range(word_start..ix).peekable();
5044 let first_char = word_text
5045 .peek()
5046 .and_then(|first_chunk| first_chunk.chars().next());
                    // When `skip_digits` is set, skip "words" that start with a digit, as a heuristic to reduce useless completions.
5048 if !query.skip_digits
5049 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5050 {
5051 words.insert(word_text.collect(), word_range);
5052 }
5053 }
5054 query_ix = 0;
5055 }
5056 chunk_ix += chunk.text.len();
5057 }
5058
5059 words
5060 }
5061}
5062
5063pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy string, matched in order and case-insensitively.
5065 pub fuzzy_contents: Option<&'a str>,
5066 /// Skips words that start with a digit.
5067 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5069 pub range: Range<usize>,
5070}
5071
5072fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5073 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5074}
5075
5076fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5077 let mut result = IndentSize::spaces(0);
5078 for c in text {
5079 let kind = match c {
5080 ' ' => IndentKind::Space,
5081 '\t' => IndentKind::Tab,
5082 _ => break,
5083 };
5084 if result.len == 0 {
5085 result.kind = kind;
5086 }
5087 result.len += 1;
5088 }
5089 result
5090}
5091
5092impl Clone for BufferSnapshot {
5093 fn clone(&self) -> Self {
5094 Self {
5095 text: self.text.clone(),
5096 syntax: self.syntax.clone(),
5097 file: self.file.clone(),
5098 remote_selections: self.remote_selections.clone(),
5099 diagnostics: self.diagnostics.clone(),
5100 language: self.language.clone(),
5101 tree_sitter_data: self.tree_sitter_data.clone(),
5102 non_text_state_update_count: self.non_text_state_update_count,
5103 }
5104 }
5105}
5106
5107impl Deref for BufferSnapshot {
5108 type Target = text::BufferSnapshot;
5109
5110 fn deref(&self) -> &Self::Target {
5111 &self.text
5112 }
5113}
5114
5115unsafe impl Send for BufferChunks<'_> {}
5116
5117impl<'a> BufferChunks<'a> {
5118 pub(crate) fn new(
5119 text: &'a Rope,
5120 range: Range<usize>,
5121 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5122 diagnostics: bool,
5123 buffer_snapshot: Option<&'a BufferSnapshot>,
5124 ) -> Self {
5125 let mut highlights = None;
5126 if let Some((captures, highlight_maps)) = syntax {
5127 highlights = Some(BufferChunkHighlights {
5128 captures,
5129 next_capture: None,
5130 stack: Default::default(),
5131 highlight_maps,
5132 })
5133 }
5134
5135 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5136 let chunks = text.chunks_in_range(range.clone());
5137
5138 let mut this = BufferChunks {
5139 range,
5140 buffer_snapshot,
5141 chunks,
5142 diagnostic_endpoints,
5143 error_depth: 0,
5144 warning_depth: 0,
5145 information_depth: 0,
5146 hint_depth: 0,
5147 unnecessary_depth: 0,
5148 underline: true,
5149 highlights,
5150 };
5151 this.initialize_diagnostic_endpoints();
5152 this
5153 }
5154
    /// Seeks to the given byte range in the buffer.
5156 pub fn seek(&mut self, range: Range<usize>) {
5157 let old_range = std::mem::replace(&mut self.range, range.clone());
5158 self.chunks.set_range(self.range.clone());
5159 if let Some(highlights) = self.highlights.as_mut() {
5160 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5161 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5162 highlights
5163 .stack
5164 .retain(|(end_offset, _)| *end_offset > range.start);
5165 if let Some(capture) = &highlights.next_capture
5166 && range.start >= capture.node.start_byte()
5167 {
5168 let next_capture_end = capture.node.end_byte();
5169 if range.start < next_capture_end {
5170 highlights.stack.push((
5171 next_capture_end,
5172 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5173 ));
5174 }
5175 highlights.next_capture.take();
5176 }
5177 } else if let Some(snapshot) = self.buffer_snapshot {
5178 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5179 *highlights = BufferChunkHighlights {
5180 captures,
5181 next_capture: None,
5182 stack: Default::default(),
5183 highlight_maps,
5184 };
5185 } else {
5186 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5187 // Seeking such BufferChunks is not supported.
5188 debug_assert!(
5189 false,
5190 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5191 );
5192 }
5193
5194 highlights.captures.set_byte_range(self.range.clone());
5195 self.initialize_diagnostic_endpoints();
5196 }
5197 }
5198
5199 fn initialize_diagnostic_endpoints(&mut self) {
5200 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5201 && let Some(buffer) = self.buffer_snapshot
5202 {
5203 let mut diagnostic_endpoints = Vec::new();
5204 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5205 diagnostic_endpoints.push(DiagnosticEndpoint {
5206 offset: entry.range.start,
5207 is_start: true,
5208 severity: entry.diagnostic.severity,
5209 is_unnecessary: entry.diagnostic.is_unnecessary,
5210 underline: entry.diagnostic.underline,
5211 });
5212 diagnostic_endpoints.push(DiagnosticEndpoint {
5213 offset: entry.range.end,
5214 is_start: false,
5215 severity: entry.diagnostic.severity,
5216 is_unnecessary: entry.diagnostic.is_unnecessary,
5217 underline: entry.diagnostic.underline,
5218 });
5219 }
5220 diagnostic_endpoints
5221 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5222 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5223 self.hint_depth = 0;
5224 self.error_depth = 0;
5225 self.warning_depth = 0;
5226 self.information_depth = 0;
5227 }
5228 }
5229
5230 /// The current byte offset in the buffer.
5231 pub fn offset(&self) -> usize {
5232 self.range.start
5233 }
5234
5235 pub fn range(&self) -> Range<usize> {
5236 self.range.clone()
5237 }
5238
5239 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5240 let depth = match endpoint.severity {
5241 DiagnosticSeverity::ERROR => &mut self.error_depth,
5242 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5243 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5244 DiagnosticSeverity::HINT => &mut self.hint_depth,
5245 _ => return,
5246 };
5247 if endpoint.is_start {
5248 *depth += 1;
5249 } else {
5250 *depth -= 1;
5251 }
5252
5253 if endpoint.is_unnecessary {
5254 if endpoint.is_start {
5255 self.unnecessary_depth += 1;
5256 } else {
5257 self.unnecessary_depth -= 1;
5258 }
5259 }
5260 }
5261
5262 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5263 if self.error_depth > 0 {
5264 Some(DiagnosticSeverity::ERROR)
5265 } else if self.warning_depth > 0 {
5266 Some(DiagnosticSeverity::WARNING)
5267 } else if self.information_depth > 0 {
5268 Some(DiagnosticSeverity::INFORMATION)
5269 } else if self.hint_depth > 0 {
5270 Some(DiagnosticSeverity::HINT)
5271 } else {
5272 None
5273 }
5274 }
5275
5276 fn current_code_is_unnecessary(&self) -> bool {
5277 self.unnecessary_depth > 0
5278 }
5279}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop syntax captures that ended at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push captures that begin at or before the current position onto the
            // stack, stopping at the first capture that starts later.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Temporarily move the endpoint iterator out of `self` so that
        // `update_diagnostic_depths` can borrow `self` mutably inside the loop.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            // The emitted slice ends at the next capture start, the next diagnostic
            // boundary, or the end of the current chunk, whichever comes first.
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            // Offsets of the emitted slice within the current chunk. These also index
            // the per-byte `tabs` and `chars` bitmaps below.
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Re-align the per-byte bitmaps so bit 0 corresponds to the first byte of `slice`.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
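    ///
    /// A minimal illustrative sketch (the inputs are invented for this example):
    ///
    /// ```ignore
    /// assert_eq!(IndentSize::spaces(2).chars().collect::<String>(), "  ");
    /// assert_eq!(IndentSize::tab().chars().collect::<String>(), "\t");
    /// ```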
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// ([`Ordering::Less`]), enlarged ([`Ordering::Greater`]), or left unchanged
    /// ([`Ordering::Equal`]) by the given size.
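    ///
    /// A minimal illustrative sketch (assumes the `len` field is readable by the
    /// caller; the inputs are invented for this example):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing by two spaces of the same kind extends the indent.
    /// assert_eq!(
    ///     IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2)).len,
    ///     6
    /// );
    /// // Shrinking by a different kind (a tab) leaves the indent unchanged.
    /// assert_eq!(
    ///     IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len,
    ///     4
    /// );
    /// ```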
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

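    /// The number of columns this indent occupies, counting each tab as
    /// `tab_size` columns.
    ///
    /// A minimal illustrative sketch (the inputs are invented for this example):
    ///
    /// ```ignore
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// ```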
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

/// A minimal in-memory [`File`] implementation for use in tests.
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

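/// Coalesces consecutive values from the given iterator into ranges, never
/// letting a single range grow longer than `max_len`.
///
/// A minimal illustrative sketch (marked `ignore` since this helper is
/// crate-private; the inputs are invented for this example):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```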
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

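/// Classifies characters as word, whitespace, or punctuation characters, taking
/// language-specific word characters into account when a [`LanguageScope`] is
/// provided.
///
/// A minimal illustrative sketch (the inputs are invented for this example):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Word); // punctuation is ignored
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```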
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    /// Classifies `c`, using the given `ignore_punctuation` value in place of
    /// the classifier's own setting.
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    /// Classifies `c` using the classifier's configured `ignore_punctuation` setting.
    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
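///
/// A minimal illustrative sketch (assumes `Rope` can be built from a `&str`;
/// the inputs are invented for this example):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```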
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // Whitespace at the start of this chunk may continue a trailing-whitespace
            // run that began at the end of the previous chunk.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The last piece of the chunk has no trailing '\n', so undo the extra newline
        // counted above and carry its range over to the next chunk.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}